From 8a33bc923de38d14a4eef0e548c3faf9eef6fa7d Mon Sep 17 00:00:00 2001 From: Tosone Date: Sun, 6 Aug 2023 12:58:23 +0800 Subject: [PATCH] :sparkles: Support image builder --- .gitignore | 2 + .golangci.yml | 2 +- cmd/builder/builder.go | 41 +- cmd/builder/checker.go | 14 +- cmd/imports/apis.go | 1 + cmd/imports/builder.go | 23 + cmd/imports/daemon.go | 1 + cmd/root.go | 4 + conf/config-dev.yaml | 12 +- conf/config.yaml | 12 +- pkg/builder/builder.go | 115 +++++ pkg/builder/docker/docker.go | 172 +++++++ pkg/builder/docker/docker_test.go | 37 ++ pkg/builder/docker/informer.go | 155 ++++++ pkg/builder/k8s/k8s.go | 15 + pkg/builder/logger/database/database.go | 105 ++++ pkg/builder/logger/doc.go | 17 + pkg/builder/logger/logger.go | 54 +++ pkg/builder/logger/obs/obs.go | 91 ++++ pkg/builder/podman/podman.go | 15 + pkg/cmds/server/server.go | 7 +- pkg/cmds/worker/worker.go | 8 +- pkg/configs/checker.go | 31 ++ pkg/configs/configuration.go | 183 +++++-- pkg/consts/topics.go | 2 + pkg/daemon/builder/builder.go | 112 +++++ pkg/daemon/daemon.go | 1 + pkg/daemon/gc/gc.go | 4 +- pkg/dal/cmd/gen.go | 2 +- pkg/dal/dao/builder.go | 45 +- .../migrations/mysql/0001_initialize.up.sql | 76 +-- .../postgresql/0001_initialize.up.sql | 54 ++- .../migrations/sqlite3/0001_initialize.up.sql | 80 ++-- pkg/dal/models/builder.go | 26 +- pkg/dal/query/builder_logs.gen.go | 70 ++- pkg/dal/query/builder_runners.gen.go | 453 ++++++++++++++++++ pkg/dal/query/builders.gen.go | 159 +++++- pkg/dal/query/gen.go | 16 +- pkg/handlers/builders/builders_post.go | 130 +++++ pkg/handlers/builders/handler.go | 101 ++++ pkg/handlers/webhooks/handler.go | 4 +- pkg/types/builder.go | 43 +- pkg/types/daemon.go | 8 + pkg/types/enums/enums.go | 18 + pkg/types/enums/enums_enum.go | 184 +++++++ 45 files changed, 2489 insertions(+), 216 deletions(-) create mode 100644 cmd/imports/builder.go create mode 100644 pkg/builder/builder.go create mode 100644 pkg/builder/docker/docker.go create mode 100644 pkg/builder/docker/docker_test.go create mode 100644 pkg/builder/docker/informer.go create mode 100644 pkg/builder/k8s/k8s.go create mode 100644 pkg/builder/logger/database/database.go create mode 100644 pkg/builder/logger/doc.go create mode 100644 pkg/builder/logger/logger.go create mode 100644 pkg/builder/logger/obs/obs.go create mode 100644 pkg/builder/podman/podman.go create mode 100644 pkg/configs/checker.go create mode 100644 pkg/daemon/builder/builder.go create mode 100644 pkg/dal/query/builder_runners.gen.go create mode 100644 pkg/handlers/builders/builders_post.go create mode 100644 pkg/handlers/builders/handler.go diff --git a/.gitignore b/.gitignore index a89e08b6..e8d5cfc2 100644 --- a/.gitignore +++ b/.gitignore @@ -190,3 +190,5 @@ conf/ximager-dev.yaml *.db pkg/**/*.html + +.vscode/ diff --git a/.golangci.yml b/.golangci.yml index d384aa83..0a3c6f80 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -17,7 +17,7 @@ linters-settings: whitespace: multi-func: true cyclop: - max-complexity: 30 + max-complexity: 50 package-average: 30 skip-tests: true gci: diff --git a/cmd/builder/builder.go b/cmd/builder/builder.go index 2deb94a1..a96f0d9d 100644 --- a/cmd/builder/builder.go +++ b/cmd/builder/builder.go @@ -52,7 +52,7 @@ const ( cacheOut = "/opt/cache_out" knownHosts = "known_hosts" privateKey = "private_key" - dockerConfig = "docker_config.json" + dockerConfig = "config.json" buildkitdConfigFilename = "buildkitd.toml" workspace = "/code" compressedCache = "cache.tgz" @@ -173,14 +173,12 @@ func (b Builder) initToken() error { defer func() 
{ _ = privateKeyObj.Close() // nolint: errcheck }() - if !utils.IsFile(path.Join(homeSigma, privateKey)) { - _, err = privateKeyObj.WriteString(b.ScmSshKey) - if err != nil { - return fmt.Errorf("Write private key failed: %v", err) - } + _, err = privateKeyObj.WriteString(b.ScmSshKey) + if err != nil { + return fmt.Errorf("Write private key failed: %v", err) } } - if b.OciRegistryPassword != "" && b.OciRegistryUsername != "" { + if len(b.OciRegistryDomain) != 0 { dockerConfigObj, err := os.Create(path.Join(homeSigma, dockerConfig)) if err != nil { return fmt.Errorf("Create file failed: %v", dockerConfigObj) @@ -188,20 +186,20 @@ func (b Builder) initToken() error { defer func() { _ = dockerConfigObj.Close() // nolint: errcheck }() - cf := configfile.ConfigFile{ - AuthConfigs: map[string]dockertypes.AuthConfig{ - b.OciRegistryDomain: { - Username: b.OciRegistryUsername, - Password: b.OciRegistryPassword, - }, - }, - } - if !utils.IsFile(path.Join(homeSigma, dockerConfig)) { - err = cf.SaveToWriter(dockerConfigObj) - if err != nil { - return fmt.Errorf("Save docker config failed: %v", err) + cf := configfile.ConfigFile{} + cf.AuthConfigs = make(map[string]dockertypes.AuthConfig) + for index, domain := range b.OciRegistryDomain { + if len(b.OciRegistryUsername[index]) != 0 || len(b.OciRegistryPassword[index]) != 0 { + cf.AuthConfigs[domain] = dockertypes.AuthConfig{ + Username: b.OciRegistryUsername[index], + Password: b.OciRegistryPassword[index], + } } } + err = cf.SaveToWriter(dockerConfigObj) + if err != nil { + return fmt.Errorf("Save docker config failed: %v", err) + } } var btConfig buildkitdconfig.Config if len(b.BuildkitInsecureRegistries) > 0 { @@ -251,7 +249,7 @@ func (b Builder) gitClone() error { if b.ScmDepth != 0 { cmd.Args = append(cmd.Args, "--depth", strconv.Itoa(b.ScmDepth)) } - if b.ScmSubModule { + if b.ScmSubmodule { cmd.Args = append(cmd.Args, "--recurse-submodules") } if b.ScmCredentialType == enums.ScmCredentialTypeSsh { @@ -307,7 +305,7 @@ func (b Builder) build() error { cmd.Args = append(cmd.Args, "--opt", fmt.Sprintf("platform=%s", strings.Join(platforms, ","))) } cmd.Args = append(cmd.Args, "--frontend", "gateway.v0", "--opt", "source=docker/dockerfile") // TODO: set frontend - cmd.Args = append(cmd.Args, "--output", fmt.Sprintf("type=image,name=%s,push=false", b.OciName)) // TODO: set output push true + cmd.Args = append(cmd.Args, "--output", fmt.Sprintf("type=image,name=%s,push=true", b.OciName)) // TODO: set output push true cmd.Args = append(cmd.Args, "--export-cache", fmt.Sprintf("type=local,mode=max,compression=gzip,dest=%s", cacheOut)) // TODO: set cache volume cmd.Args = append(cmd.Args, "--import-cache", fmt.Sprintf("type=local,src=%s", cacheIn)) // TODO: set cache volume @@ -316,6 +314,7 @@ func (b Builder) build() error { buildkitdFlags += fmt.Sprintf("--config=%s", path.Join(homeSigma, buildkitdConfigFilename)) } cmd.Env = append(os.Environ(), fmt.Sprintf("BUILDKITD_FLAGS=%s", buildkitdFlags)) + cmd.Env = append(cmd.Env, fmt.Sprintf("DOCKER_CONFIG=%s", homeSigma)) log.Info().Str("command", cmd.String()).Strs("env", cmd.Env).Msg("Building image") cmd.Stdout = os.Stdout diff --git a/cmd/builder/checker.go b/cmd/builder/checker.go index 1e97d1e9..914c78f1 100644 --- a/cmd/builder/checker.go +++ b/cmd/builder/checker.go @@ -33,7 +33,7 @@ func (b *Builder) checker() error { return fmt.Errorf("SCM_SSH_KEY should be set, if SCM_CREDENTIAL_TYPE is 'ssh'") } if b.ScmSshKey != "" { - b.ScmSshKey, err = crypt.Decrypt(b.ID, b.ScmSshKey) + b.ScmSshKey, err = 
crypt.Decrypt(fmt.Sprintf("%d-%d", b.ID, b.RunnerID), b.ScmSshKey) if err != nil { return fmt.Errorf("Decrypt ssh key failed: %v", err) } @@ -43,7 +43,7 @@ func (b *Builder) checker() error { return fmt.Errorf("SCM_TOKEN should be set, if SCM_CREDENTIAL_TYPE is 'token'") } if b.ScmToken != "" { - b.ScmToken, err = crypt.Decrypt(b.ID, b.ScmToken) + b.ScmToken, err = crypt.Decrypt(fmt.Sprintf("%d-%d", b.ID, b.RunnerID), b.ScmToken) if err != nil { return fmt.Errorf("Decrypt scm token failed: %v", err) } @@ -56,7 +56,7 @@ func (b *Builder) checker() error { return fmt.Errorf("SCM_USERNAME and SCM_PASSWORD should be set, if SCM_CREDENTIAL_TYPE is 'username'") } if b.ScmPassword != "" { - b.ScmPassword, err = crypt.Decrypt(b.ID, b.ScmPassword) + b.ScmPassword, err = crypt.Decrypt(fmt.Sprintf("%d-%d", b.ID, b.RunnerID), b.ScmPassword) if err != nil { return fmt.Errorf("Decrypt scm password failed: %v", err) } @@ -71,8 +71,12 @@ func (b *Builder) checker() error { } } - if b.OciRegistryPassword != "" { - b.OciRegistryPassword, err = crypt.Decrypt(b.ID, b.OciRegistryPassword) + if len(b.OciRegistryDomain) != len(b.OciRegistryUsername) || len(b.OciRegistryDomain) != len(b.OciRegistryPassword) { + return fmt.Errorf("OCI_REGISTRY_DOMAIN length should equal OCI_REGISTRY_USERNAME and OCI_REGISTRY_PASSWORD") + } + + for index, password := range b.OciRegistryPassword { + b.OciRegistryPassword[index], err = crypt.Decrypt(fmt.Sprintf("%d-%d", b.ID, b.RunnerID), password) if err != nil { return fmt.Errorf("Decrypt oci registry password failed: %v", err) } diff --git a/cmd/imports/apis.go b/cmd/imports/apis.go index bc99967c..6cdffa63 100644 --- a/cmd/imports/apis.go +++ b/cmd/imports/apis.go @@ -17,6 +17,7 @@ package imports import ( _ "github.com/go-sigma/sigma/pkg/handlers/apidocs" _ "github.com/go-sigma/sigma/pkg/handlers/artifacts" + _ "github.com/go-sigma/sigma/pkg/handlers/builders" _ "github.com/go-sigma/sigma/pkg/handlers/daemons" _ "github.com/go-sigma/sigma/pkg/handlers/namespaces" _ "github.com/go-sigma/sigma/pkg/handlers/oauth2" diff --git a/cmd/imports/builder.go b/cmd/imports/builder.go new file mode 100644 index 00000000..a76c2ef3 --- /dev/null +++ b/cmd/imports/builder.go @@ -0,0 +1,23 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
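+
+// The blank imports below are side-effect imports: the docker builder driver
+// and the database/obs builder-log drivers register themselves in the driver
+// factory maps from their init functions; the k8s and podman packages are
+// still empty placeholders.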
+ +package imports + +import ( + _ "github.com/go-sigma/sigma/pkg/builder/docker" + _ "github.com/go-sigma/sigma/pkg/builder/k8s" + _ "github.com/go-sigma/sigma/pkg/builder/logger/database" + _ "github.com/go-sigma/sigma/pkg/builder/logger/obs" + _ "github.com/go-sigma/sigma/pkg/builder/podman" +) diff --git a/cmd/imports/daemon.go b/cmd/imports/daemon.go index 35b7589a..185ad1d2 100644 --- a/cmd/imports/daemon.go +++ b/cmd/imports/daemon.go @@ -15,6 +15,7 @@ package imports import ( + _ "github.com/go-sigma/sigma/pkg/daemon/builder" _ "github.com/go-sigma/sigma/pkg/daemon/gc" _ "github.com/go-sigma/sigma/pkg/daemon/sbom" _ "github.com/go-sigma/sigma/pkg/daemon/vulnerability" diff --git a/cmd/root.go b/cmd/root.go index 6fe9e820..815f807c 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -23,6 +23,8 @@ import ( "github.com/spf13/cobra" "github.com/spf13/viper" + "github.com/go-sigma/sigma/pkg/configs" + _ "github.com/go-sigma/sigma/cmd/imports" ) @@ -66,4 +68,6 @@ func initConfig() { viper.SetEnvKeyReplacer(strings.NewReplacer(".", "_")) cobra.CheckErr(viper.ReadInConfig()) + + cobra.CheckErr(viper.Unmarshal(configs.GetConfiguration())) } diff --git a/conf/config-dev.yaml b/conf/config-dev.yaml index 7ad44805..ab8d988e 100644 --- a/conf/config-dev.yaml +++ b/conf/config-dev.yaml @@ -22,31 +22,31 @@ database: dbname: ximager sslmode: disable -# deploy avaliable: single, replica +# deploy available: single, replica # replica should use external redis deploy: single redis: - # redis type avaliable: internal, external + # redis type available: internal, external type: internal url: redis://:sigma@localhost:6379/0 cache: - # the cache type avaliable is: redis + # the cache type available is: redis type: redis - # please attation in multi + # please attention in multi inmemory: {} redis: {} workqueue: - # the workqueue type avaliable: redis + # the workqueue type available: redis type: redis redis: {} namespace: # push image to registry, if namespace not exist, it will be created automatically autoCreate: true - # the automatic created namespace visibility, avaliable: public, private + # the automatic created namespace visibility, available: public, private visibility: public http: diff --git a/conf/config.yaml b/conf/config.yaml index 87d21c89..0ae7fe73 100644 --- a/conf/config.yaml +++ b/conf/config.yaml @@ -22,31 +22,31 @@ database: dbname: ximager sslmode: disable -# deploy avaliable: single, replica +# deploy available: single, replica # replica should use external redis deploy: single redis: - # redis type avaliable: internal, external + # redis type available: internal, external type: internal url: redis://:sigma@localhost:6379/0 cache: - # the cache type avaliable is: redis + # the cache type available is: redis type: redis - # please attation in multi + # please attention in multi inmemory: {} redis: {} workqueue: - # the workqueue type avaliable: redis + # the workqueue type available: redis type: redis redis: {} namespace: # push image to registry, if namespace not exist, it will be created automatically autoCreate: false - # the automatic created namespace visibility, avaliable: public, private + # the automatic created namespace visibility, available: public, private visibility: public http: diff --git a/pkg/builder/builder.go b/pkg/builder/builder.go new file mode 100644 index 00000000..48250f61 --- /dev/null +++ b/pkg/builder/builder.go @@ -0,0 +1,115 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file 
except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package builder + +import ( + "context" + "fmt" + "io" + "strings" + + builderlogger "github.com/go-sigma/sigma/pkg/builder/logger" + "github.com/go-sigma/sigma/pkg/types" + "github.com/go-sigma/sigma/pkg/utils" + "github.com/go-sigma/sigma/pkg/utils/crypt" +) + +// Builder ... +type Builder interface { + // Start start a container to build oci image and push to registry + Start(ctx context.Context, builderConfig BuilderConfig) error + // Stop stop the container + Stop(ctx context.Context, builderID, runnerID int64) error + // Restart wrap stop and start + Restart(ctx context.Context, builderConfig BuilderConfig) error + // LogStream get the real time log stream + LogStream(ctx context.Context, builderID, runnerID int64, writer io.Writer) error +} + +type BuilderConfig struct { + types.Builder +} + +// Driver is the builder driver, maybe implement by docker, podman, k8s, etc. +var Driver Builder + +// Factory is the interface for the builder driver factory +type Factory interface { + New() (Builder, error) +} + +// DriverFactories ... +var DriverFactories = make(map[string]Factory) + +func Initialize() error { + typ := "docker" + factory, ok := DriverFactories[typ] + if !ok { + return fmt.Errorf("builder driver %q not registered", typ) + } + var err error + Driver, err = factory.New() + if err != nil { + return err + } + return builderlogger.Initialize() +} + +// BuildEnv ... 
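+// It flattens the builder configuration into environment variables for the
+// sigma-builder entrypoint: multi-registry fields are joined with commas, and
+// secrets (SCM password, SSH key, token and registry passwords) are encrypted
+// with a key derived from the builder and runner IDs before being exported.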
+func BuildEnv(builderConfig BuilderConfig) []string { + buildConfigEnvs := []string{ + fmt.Sprintf("ID=%d", builderConfig.ID), + fmt.Sprintf("RUNNER_ID=%d", builderConfig.RunnerID), + + fmt.Sprintf("SCM_CREDENTIAL_TYPE=%s", builderConfig.ScmCredentialType.String()), + fmt.Sprintf("SCM_USERNAME=%s", builderConfig.ScmUsername), + fmt.Sprintf("SCM_PROVIDER=%s", builderConfig.ScmProvider.String()), + fmt.Sprintf("SCM_REPOSITORY=%s", builderConfig.ScmRepository), + fmt.Sprintf("SCM_BRANCH=%s", builderConfig.ScmBranch), + fmt.Sprintf("SCM_DEPTH=%d", builderConfig.ScmDepth), + fmt.Sprintf("SCM_SUBMODULE=%t", builderConfig.ScmSubmodule), + + fmt.Sprintf("OCI_REGISTRY_DOMAIN=%s", strings.Join(builderConfig.OciRegistryDomain, ",")), + fmt.Sprintf("OCI_REGISTRY_USERNAME=%s", strings.Join(builderConfig.OciRegistryUsername, ",")), + fmt.Sprintf("OCI_NAME=%s", builderConfig.OciName), + + fmt.Sprintf("BUILDKIT_INSECURE_REGISTRIES=%s", strings.Join(builderConfig.BuildkitInsecureRegistries, ",")), + fmt.Sprintf("BUILDKIT_CACHE_DIR=%s", builderConfig.BuildkitCacheDir), + fmt.Sprintf("BUILDKIT_CONTEXT=%s", builderConfig.BuildkitContext), + fmt.Sprintf("BUILDKIT_DOCKERFILE=%s", builderConfig.BuildkitDockerfile), + fmt.Sprintf("BUILDKIT_PLATFORMS=%s", utils.StringsJoin(builderConfig.BuildkitPlatforms, ",")), + } + if builderConfig.ScmPassword != "" { + buildConfigEnvs = append(buildConfigEnvs, fmt.Sprintf("SCM_PASSWORD=%s", crypt.MustEncrypt( + fmt.Sprintf("%d-%d", builderConfig.ID, builderConfig.RunnerID), builderConfig.ScmPassword))) + } + if builderConfig.ScmSshKey != "" { + buildConfigEnvs = append(buildConfigEnvs, fmt.Sprintf("SCM_SSH_KEY=%s", crypt.MustEncrypt( + fmt.Sprintf("%d-%d", builderConfig.ID, builderConfig.RunnerID), builderConfig.ScmSshKey))) + } + if builderConfig.ScmToken != "" { + buildConfigEnvs = append(buildConfigEnvs, fmt.Sprintf("SCM_TOKEN=%s", crypt.MustEncrypt( + fmt.Sprintf("%d-%d", builderConfig.ID, builderConfig.RunnerID), builderConfig.ScmToken))) + } + if len(builderConfig.OciRegistryPassword) != 0 { + var passwords []string + for _, p := range builderConfig.OciRegistryPassword { + passwords = append(passwords, crypt.MustEncrypt(fmt.Sprintf("%d-%d", builderConfig.ID, builderConfig.RunnerID), p)) + } + buildConfigEnvs = append(buildConfigEnvs, fmt.Sprintf("OCI_REGISTRY_PASSWORD=%s", strings.Join(passwords, ","))) + } + + return buildConfigEnvs +} diff --git a/pkg/builder/docker/docker.go b/pkg/builder/docker/docker.go new file mode 100644 index 00000000..2a6ac9ad --- /dev/null +++ b/pkg/builder/docker/docker.go @@ -0,0 +1,172 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
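+
+// The docker driver runs every build in a disposable container named
+// "sigma-builder-<builderID>-<runnerID>". Containers are labelled with the
+// application name plus builder and runner IDs so the event informer can map
+// Docker lifecycle events back to builder runners, persist their logs and
+// update their status.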
+ +package docker + +import ( + "context" + "fmt" + "io" + "path" + "reflect" + "strconv" + "strings" + "time" + + mapset "github.com/deckarep/golang-set/v2" + dockertypes "github.com/docker/docker/api/types" + "github.com/docker/docker/api/types/container" + "github.com/docker/docker/client" + "github.com/docker/docker/pkg/stdcopy" + "github.com/rs/zerolog/log" + + "github.com/go-sigma/sigma/pkg/builder" + "github.com/go-sigma/sigma/pkg/consts" + "github.com/go-sigma/sigma/pkg/dal/dao" +) + +func init() { + builder.DriverFactories[path.Base(reflect.TypeOf(factory{}).PkgPath())] = &factory{} +} + +type factory struct{} + +var _ builder.Factory = factory{} + +// New returns a new filesystem storage driver +func (f factory) New() (builder.Builder, error) { + cli, err := client.NewClientWithOpts(client.FromEnv) + if err != nil { + return nil, fmt.Errorf("Create docker client failed: %v", err) + } + i := &instance{ + client: cli, + controlled: mapset.NewSet[string](), + builderServiceFactory: dao.NewBuilderServiceFactory(), + } + go i.informer(context.Background()) + return i, nil +} + +type instance struct { + client *client.Client + controlled mapset.Set[string] // the controlled container in docker container + builderServiceFactory dao.BuilderServiceFactory +} + +var _ builder.Builder = instance{} + +// Start start a container to build oci image and push to registry +func (i instance) Start(ctx context.Context, builderConfig builder.BuilderConfig) error { + containerConfig := &container.Config{ + Image: "docker.io/library/builder:dev", + Entrypoint: []string{}, + Cmd: []string{"sigma-builder"}, + Env: builder.BuildEnv(builderConfig), + Labels: map[string]string{ + "oci-image-builder": consts.AppName, + "builder-id": strconv.FormatInt(builderConfig.ID, 10), + "runner-id": strconv.FormatInt(builderConfig.RunnerID, 10), + }, + } + hostConfig := &container.HostConfig{ + SecurityOpt: []string{"seccomp=unconfined", "apparmor=unconfined"}, + } + _, err := i.client.ContainerCreate(ctx, containerConfig, hostConfig, nil, nil, i.genContainerID(builderConfig.ID, builderConfig.RunnerID)) + if err != nil { + return fmt.Errorf("Create container failed: %v", err) + } + return i.client.ContainerStart(ctx, i.genContainerID(builderConfig.ID, builderConfig.RunnerID), dockertypes.ContainerStartOptions{}) +} + +const ( + retryMax = 10 + retryDuration = time.Second +) + +// Stop stop the container +func (i instance) Stop(ctx context.Context, builderID, runnerID int64) error { + err := i.client.ContainerKill(ctx, i.genContainerID(builderID, runnerID), "SIGKILL") + if err != nil { + log.Error().Err(err).Str("id", i.genContainerID(builderID, runnerID)).Msg("Kill container failed") + return fmt.Errorf("Kill container failed: %v", err) + } + err = i.client.ContainerRemove(ctx, i.genContainerID(builderID, runnerID), dockertypes.ContainerRemoveOptions{}) + if err != nil { + log.Error().Err(err).Str("id", i.genContainerID(builderID, runnerID)).Msg("Remove container failed") + return fmt.Errorf("Remove container failed: %v", err) + } + for j := 0; j < retryMax; j++ { + _, err = i.client.ContainerInspect(ctx, i.genContainerID(builderID, runnerID)) + if err != nil { + if strings.Contains(err.Error(), fmt.Sprintf("No such container: %s", i.genContainerID(builderID, runnerID))) { + return nil + } + return fmt.Errorf("Inspect container with error: %v", err) + } + <-time.After(retryDuration) + } + return nil +} + +// Restart wrap stop and start +func (i instance) Restart(ctx context.Context, builderConfig builder.BuilderConfig) 
error { + err := i.Stop(ctx, builderConfig.ID, builderConfig.RunnerID) + if err != nil { + return err + } + err = i.Start(ctx, builderConfig) + if err != nil { + return err + } + return nil +} + +// LogStream get the real time log stream +func (i instance) LogStream(ctx context.Context, builderID, runnerID int64, writer io.Writer) error { + reader, err := i.client.ContainerLogs(ctx, i.genContainerID(builderID, runnerID), dockertypes.ContainerLogsOptions{ + ShowStdout: true, + ShowStderr: true, + Follow: true, + }) + if err != nil { + return fmt.Errorf("Get container logs failed: %v", err) + } + _, err = stdcopy.StdCopy(writer, writer, reader) + return err +} + +// genContainerID ... +func (i instance) genContainerID(builderID, runnerID int64) string { + return fmt.Sprintf("sigma-builder-%d-%d", builderID, runnerID) +} + +// getBuilderTaskID ... +func (i instance) getBuilderTaskID(containerName string) (int64, int64, error) { + containerName = strings.TrimPrefix(containerName, "/") + ids := strings.TrimPrefix(containerName, "sigma-builder-") + if len(strings.Split(ids, "-")) != 2 { + return 0, 0, fmt.Errorf("Parse builder task id(%s) failed", containerName) + } + builderIDStr, runnerIDStr := strings.Split(ids, "-")[0], strings.Split(ids, "-")[1] + builderID, err := strconv.ParseInt(builderIDStr, 10, 0) + if err != nil { + return 0, 0, fmt.Errorf("Parse builder task id(%s) failed: %v", containerName, err) + } + runnerID, err := strconv.ParseInt(runnerIDStr, 10, 0) + if err != nil { + return 0, 0, fmt.Errorf("Parse builder task id(%s) failed: %v", containerName, err) + } + return builderID, runnerID, nil +} diff --git a/pkg/builder/docker/docker_test.go b/pkg/builder/docker/docker_test.go new file mode 100644 index 00000000..3ac5a555 --- /dev/null +++ b/pkg/builder/docker/docker_test.go @@ -0,0 +1,37 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package docker + +// import ( +// "context" +// "fmt" +// "testing" +// "time" + +// "github.com/stretchr/testify/assert" +// ) + +// func TestStart(t *testing.T) { +// var f = factory{} +// i, err := f.New() +// assert.NoError(t, err) + +// ctx := context.Background() +// err = i.Start(ctx, "test") +// assert.NoError(t, err) +// time.Sleep(time.Second * 5) +// fmt.Println(i.Stop(ctx, "test")) +// time.Sleep(time.Hour) +// } diff --git a/pkg/builder/docker/informer.go b/pkg/builder/docker/informer.go new file mode 100644 index 00000000..46a3ee54 --- /dev/null +++ b/pkg/builder/docker/informer.go @@ -0,0 +1,155 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package docker + +import ( + "context" + "fmt" + + "github.com/docker/docker/api/types" + "github.com/docker/docker/pkg/stdcopy" + "github.com/rs/zerolog/log" + + builderlogger "github.com/go-sigma/sigma/pkg/builder/logger" + "github.com/go-sigma/sigma/pkg/consts" + "github.com/go-sigma/sigma/pkg/dal/query" + "github.com/go-sigma/sigma/pkg/types/enums" +) + +func (i *instance) informer(ctx context.Context) { + go func(ctx context.Context) { + eventsOpt := types.EventsOptions{} + events, errs := i.client.Events(ctx, eventsOpt) + for { + select { + case <-ctx.Done(): + return + case event := <-events: + switch event.Type { // nolint: gocritic + case "container": + switch event.Action { + case "start": + container, err := i.client.ContainerInspect(ctx, event.Actor.ID) + if err != nil { + log.Error().Err(err).Str("id", event.Actor.ID).Msg("Inspect container failed") + continue + } + if container.Config != nil && container.Config.Labels != nil { + if container.Config.Labels["oci-image-builder"] != consts.AppName || + container.Config.Labels["builder-id"] == "" || + container.Config.Labels["runner-id"] == "" { + log.Debug().Msgf("Container not controlled by %s", consts.AppName) + continue + } + } + if container.ContainerJSONBase != nil && container.ContainerJSONBase.State != nil && + (container.ContainerJSONBase.State.Running || + container.ContainerJSONBase.State.Status == "running" || // TODO: we should test all case + container.ContainerJSONBase.State.Status == "exited") { + log.Info().Str("id", event.Actor.ID).Str("name", container.ContainerJSONBase.Name).Msg("Builder container started") + builderID, runnerID, err := i.getBuilderTaskID(container.ContainerJSONBase.Name) + if err != nil { + log.Error().Err(err).Str("container", container.ContainerJSONBase.Name).Msg("Parse builder task id failed") + continue + } + go func(id string) { + err := i.logStore(ctx, event.Actor.ID, builderID, runnerID) + if err != nil { + log.Error().Err(err).Str("id", id).Msg("Get container log failed") + } + }(event.Actor.ID) + } + case "die": + container, err := i.client.ContainerInspect(ctx, event.Actor.ID) + if err != nil { + log.Error().Err(err).Str("id", event.Actor.ID).Msg("Inspect container failed") + continue + } + if container.Config != nil && container.Config.Labels != nil && container.ContainerJSONBase != nil { + if container.Config.Labels["oci-image-builder"] != consts.AppName || + container.Config.Labels["builder-id"] == "" || + container.Config.Labels["runner-id"] == "" { + log.Debug().Msgf("Container not controlled by %s", consts.AppName) + continue + } + } + i.controlled.Remove(event.Actor.ID) + + builderID, runnerID, err := i.getBuilderTaskID(container.ContainerJSONBase.Name) + if err != nil { + log.Error().Err(err).Str("container", container.ContainerJSONBase.Name).Msg("Parse builder task id failed") + continue + } + + builderService := i.builderServiceFactory.New() + updates := make(map[string]any, 1) + if container.ContainerJSONBase != nil && container.ContainerJSONBase.State != nil { + if container.ContainerJSONBase.State.ExitCode == 0 { + updates = map[string]any{query.BuilderRunner.Status.ColumnName().String(): enums.BuildStatusSuccess} + log.Info().Str("id", event.Actor.ID).Str("name", container.ContainerJSONBase.Name).Msg("Builder container succeed") + } else { + updates = map[string]any{query.BuilderRunner.Status.ColumnName().String(): enums.BuildStatusFailed} + 
log.Error().Int("ExitCode", container.ContainerJSONBase.State.ExitCode). + Str("Error", container.ContainerJSONBase.State.Error). + Bool("OOMKilled", container.ContainerJSONBase.State.OOMKilled). + Msg("Builder container exited") + } + } + err = builderService.UpdateRunner(ctx, builderID, runnerID, updates) + if err != nil { + log.Error().Err(err).Msg("Update runner failed") + } + } + } + case err := <-errs: + log.Error().Err(err).Msg("Docker event error") + } + } + }(ctx) +} + +func (i *instance) logStore(ctx context.Context, containerID string, builderID, runnerID int64) error { + ok := i.controlled.Add(containerID) + if !ok { + log.Error().Str("container", containerID).Int64("builder", builderID).Int64("runner", runnerID).Msg("Add container id to controlled array failed") + return fmt.Errorf("Add container id to controlled array failed") + } + reader, err := i.client.ContainerLogs(ctx, containerID, types.ContainerLogsOptions{ + ShowStdout: true, + ShowStderr: true, + Follow: true, + }) + if err != nil { + return fmt.Errorf("Get container logs failed: %v", err) + } + + writer := builderlogger.Driver.Write(builderID, runnerID) + _, err = stdcopy.StdCopy(writer, writer, reader) + if err != nil { + return fmt.Errorf("Copy container logs failed: %v", err) + } + err = writer.Close() + if err != nil { + return fmt.Errorf("Close container logs failed: %v", err) + } + + err = i.client.ContainerRemove(ctx, containerID, types.ContainerRemoveOptions{}) + if err != nil { + log.Error().Err(err).Str("container", containerID).Int64("builder", builderID).Int64("runner", runnerID).Msg("Remove container failed") + return fmt.Errorf("Remove container failed: %v", err) + } + + return err +} diff --git a/pkg/builder/k8s/k8s.go b/pkg/builder/k8s/k8s.go new file mode 100644 index 00000000..a4f4fc58 --- /dev/null +++ b/pkg/builder/k8s/k8s.go @@ -0,0 +1,15 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package k8s diff --git a/pkg/builder/logger/database/database.go b/pkg/builder/logger/database/database.go new file mode 100644 index 00000000..7de198a3 --- /dev/null +++ b/pkg/builder/logger/database/database.go @@ -0,0 +1,105 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
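+
+// The database log driver buffers the build log through an in-memory gzip
+// writer and, on Close, stores the compressed bytes in the builder runner's
+// log column via the builder service.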
+ +package obs + +import ( + "bytes" + "compress/gzip" + "context" + "fmt" + "io" + "path" + "reflect" + "strconv" + + "github.com/rs/zerolog/log" + + builderlogger "github.com/go-sigma/sigma/pkg/builder/logger" + "github.com/go-sigma/sigma/pkg/dal/dao" + "github.com/go-sigma/sigma/pkg/dal/query" +) + +func init() { + builderlogger.DriverFactories[path.Base(reflect.TypeOf(factory{}).PkgPath())] = &factory{} +} + +type factory struct{} + +var _ builderlogger.Factory = factory{} + +// New returns a new filesystem storage driver +func (f factory) New() (builderlogger.BuilderLogger, error) { + return &database{ + db: dao.NewBuilderServiceFactory(), + }, nil +} + +type database struct { + db dao.BuilderServiceFactory +} + +func (d *database) Write(builderID, runnerID int64) io.WriteCloser { + buffer := new(bytes.Buffer) + gw := gzip.NewWriter(buffer) + return &writer{ + builderID: builderID, + runnerID: runnerID, + gw: gw, + data: buffer, + db: d.db, + } +} + +type writer struct { + builderID int64 + runnerID int64 + gw *gzip.Writer + data *bytes.Buffer + db dao.BuilderServiceFactory +} + +// Write writes the given bytes to the underlying storage +func (w *writer) Write(p []byte) (n int, err error) { + return w.gw.Write(p) +} + +// Close closes the writer and flushes the data to the underlying storage +func (w *writer) Close() error { + err := w.gw.Flush() + if err != nil { + return err + } + builderService := w.db.New() + data := w.data.Bytes() + log.Info().Int("len", len(data)).Msg("Create builder log success") + updates := map[string]any{ + query.BuilderRunner.Log.ColumnName().String(): data, + } + return builderService.UpdateRunner(context.Background(), w.builderID, w.runnerID, updates) +} + +// Read returns a reader for the given id +func (d *database) Read(ctx context.Context, id string) (io.Reader, error) { + builderId, err := strconv.ParseInt(id, 10, 0) + if err != nil { + return nil, fmt.Errorf("Failed to parse builder id: %v", err) + } + builderService := d.db.New() + builderLog, err := builderService.GetRunner(ctx, builderId) + if err != nil { + return nil, fmt.Errorf("Failed to get builder log: %v", err) + } + return bytes.NewReader(builderLog.Log), nil +} diff --git a/pkg/builder/logger/doc.go b/pkg/builder/logger/doc.go new file mode 100644 index 00000000..3311e1b3 --- /dev/null +++ b/pkg/builder/logger/doc.go @@ -0,0 +1,17 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// package logger support for builder log write to object storage or database, +// and support for read builder history log. +package logger diff --git a/pkg/builder/logger/logger.go b/pkg/builder/logger/logger.go new file mode 100644 index 00000000..13dc17aa --- /dev/null +++ b/pkg/builder/logger/logger.go @@ -0,0 +1,54 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package logger + +import ( + "context" + "fmt" + "io" +) + +// BuilderLogger ... +type BuilderLogger interface { + // Write write log to object storage or database + Write(builderID, runnerID int64) io.WriteCloser + // Read get log from object storage or database + Read(ctx context.Context, id string) (io.Reader, error) +} + +// Driver is the builder logger driver, maybe implement by s3, database, etc. +var Driver BuilderLogger + +// Factory is the interface for the builder logger factory +type Factory interface { + New() (BuilderLogger, error) +} + +// DriverFactories ... +var DriverFactories = make(map[string]Factory) + +func Initialize() error { + typ := "database" + factory, ok := DriverFactories[typ] + if !ok { + return fmt.Errorf("builder logger driver %q not registered", typ) + } + var err error + Driver, err = factory.New() + if err != nil { + return err + } + return nil +} diff --git a/pkg/builder/logger/obs/obs.go b/pkg/builder/logger/obs/obs.go new file mode 100644 index 00000000..3365ab59 --- /dev/null +++ b/pkg/builder/logger/obs/obs.go @@ -0,0 +1,91 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
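+
+// The obs log driver gzip-compresses the build log in memory and, on Close,
+// uploads it to object storage under the builder-logs prefix as
+// "<builderID>/<runnerID>.log.gz".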
+ +package obs + +import ( + "bytes" + "compress/gzip" + "context" + "fmt" + "io" + "path" + "reflect" + + builderlogger "github.com/go-sigma/sigma/pkg/builder/logger" + "github.com/go-sigma/sigma/pkg/consts" + "github.com/go-sigma/sigma/pkg/storage" + "github.com/go-sigma/sigma/pkg/utils" +) + +func init() { + builderlogger.DriverFactories[path.Base(reflect.TypeOf(factory{}).PkgPath())] = &factory{} +} + +type factory struct{} + +var _ builderlogger.Factory = factory{} + +// New returns a new filesystem storage driver +func (f factory) New() (builderlogger.BuilderLogger, error) { + return &obs{ + storage: storage.Driver, + }, nil +} + +type obs struct { + storage storage.StorageDriver +} + +// Write returns a writer for the given id +func (o *obs) Write(builderID, runnerID int64) io.WriteCloser { + buffer := new(bytes.Buffer) + gw := gzip.NewWriter(buffer) + return &writer{ + builderID: builderID, + runnerID: runnerID, + gw: gw, + data: buffer, + storage: o.storage, + } +} + +type writer struct { + builderID int64 + runnerID int64 + gw *gzip.Writer + data *bytes.Buffer + storage storage.StorageDriver +} + +// Write writes the given bytes to the writer +func (w *writer) Write(p []byte) (n int, err error) { + return w.gw.Write(p) +} + +// Close closes the writer +func (w *writer) Close() error { + err := w.gw.Flush() + if err != nil { + return err + } + return w.storage.Upload(context.Background(), + fmt.Sprintf("%s.log.gz", path.Join(consts.BuilderLogs, fmt.Sprintf("%d/%d", w.builderID, w.runnerID))), + bytes.NewReader(w.data.Bytes())) +} + +// Read returns a reader for the given id +func (o *obs) Read(ctx context.Context, id string) (io.Reader, error) { + return o.storage.Reader(ctx, fmt.Sprintf("%s.log.gz", path.Join(consts.BuilderLogs, utils.DirWithSlash(id))), 0) +} diff --git a/pkg/builder/podman/podman.go b/pkg/builder/podman/podman.go new file mode 100644 index 00000000..b6b9ef28 --- /dev/null +++ b/pkg/builder/podman/podman.go @@ -0,0 +1,15 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
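+
+// Package podman currently only declares the package: a podman-based builder
+// driver is not implemented yet and no driver factory is registered here.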
+ +package podman diff --git a/pkg/cmds/server/server.go b/pkg/cmds/server/server.go index 2190aadf..319b472d 100644 --- a/pkg/cmds/server/server.go +++ b/pkg/cmds/server/server.go @@ -30,6 +30,7 @@ import ( "github.com/shirou/gopsutil/v3/process" "github.com/spf13/viper" + "github.com/go-sigma/sigma/pkg/builder" "github.com/go-sigma/sigma/pkg/consts" "github.com/go-sigma/sigma/pkg/daemon" "github.com/go-sigma/sigma/pkg/handlers" @@ -79,7 +80,11 @@ func Serve(config ServerConfig) error { handlers.InitializeDistribution(e) } if !config.WithoutWorker { - err := daemon.InitializeServer() + err := builder.Initialize() + if err != nil { + return err + } + err = daemon.InitializeServer() if err != nil { return err } diff --git a/pkg/cmds/worker/worker.go b/pkg/cmds/worker/worker.go index bd3a8b74..00eb4568 100644 --- a/pkg/cmds/worker/worker.go +++ b/pkg/cmds/worker/worker.go @@ -26,6 +26,7 @@ import ( "github.com/rs/zerolog/log" "github.com/spf13/viper" + "github.com/go-sigma/sigma/pkg/builder" "github.com/go-sigma/sigma/pkg/consts" "github.com/go-sigma/sigma/pkg/daemon" "github.com/go-sigma/sigma/pkg/middlewares" @@ -33,7 +34,12 @@ import ( // Worker is the worker initialization func Worker() error { - err := daemon.InitializeServer() + err := builder.Initialize() + if err != nil { + return err + } + + err = daemon.InitializeServer() if err != nil { return err } diff --git a/pkg/configs/checker.go b/pkg/configs/checker.go new file mode 100644 index 00000000..b8df7a2d --- /dev/null +++ b/pkg/configs/checker.go @@ -0,0 +1,31 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package configs + +import ( + "fmt" + + "github.com/go-sigma/sigma/pkg/types/enums" +) + +// CheckDeploy ... +func (c *Configuration) CheckDeploy() error { + if c.Deploy == enums.DeployReplica { + if c.Redis.Type == enums.RedisTypeInternal { + return fmt.Errorf("Deploy replica should use external redis") + } + } + return nil +} diff --git a/pkg/configs/configuration.go b/pkg/configs/configuration.go index 10a4b373..8855f4c6 100644 --- a/pkg/configs/configuration.go +++ b/pkg/configs/configuration.go @@ -14,75 +14,190 @@ package configs -import "github.com/go-sigma/sigma/pkg/types/enums" +import ( + "github.com/go-sigma/sigma/pkg/types/enums" +) + +var configuration = &Configuration{} + +// GetConfiguration ... +func GetConfiguration() *Configuration { + return configuration +} + +// Configuration ... +type Configuration struct { + Log ConfigurationLog `yaml:"log"` + Database ConfigurationDatabase `yaml:"database"` + Deploy enums.Deploy `yaml:"deploy"` + Redis ConfigurationRedis `yaml:"redis"` + Cache ConfigurationCache `yaml:"cache"` + WorkQueue ConfigurationWorkQueue `yaml:"workqueue"` + Namespace ConfigurationNamespace `yaml:"namespace"` + HTTP ConfigurationHTTP `yaml:"http"` + Storage ConfigurationStorage `yaml:"storage"` + Proxy ConfigurationProxy `yaml:"proxy"` + Daemon ConfigurationDaemon `yaml:"daemon"` + Auth ConfigurationAuth `yaml:"auth"` +} // ConfigurationLog ... 
type ConfigurationLog struct { - Level enums.LogLevel - ProxyLevel enums.LogLevel + Level enums.LogLevel `yaml:"level"` + ProxyLevel enums.LogLevel `yaml:"proxyLevel"` } // ConfigurationDatabaseSqlite3 ... type ConfigurationDatabaseSqlite3 struct { - Path string + Path string `yaml:"path"` } // ConfigurationDatabaseMysql ... type ConfigurationDatabaseMysql struct { - Host string - Port int - User string - Password string - DBName string + Host string `yaml:"host"` + Port int `yaml:"port"` + User string `yaml:"user"` + Password string `yaml:"password"` + DBName string `yaml:"dbname"` } // ConfigurationDatabase ... type ConfigurationDatabasePostgresql struct { - Host string - Port int - User string - Password string - DBName string - SslMode bool + Host string `yaml:"host"` + Port int `yaml:"port"` + User string `yaml:"user"` + Password string `yaml:"password"` + DBName string `yaml:"dbname"` + SslMode string `yaml:"sslmode"` } // ConfigurationDatabase ... type ConfigurationDatabase struct { - Type enums.Database - Sqlite3 ConfigurationDatabaseSqlite3 - Mysql ConfigurationDatabaseMysql - Postgresql ConfigurationDatabasePostgresql + Type enums.Database `yaml:"type"` + Sqlite3 ConfigurationDatabaseSqlite3 `yaml:"sqlite3"` + Mysql ConfigurationDatabaseMysql `yaml:"mysql"` + Postgresql ConfigurationDatabasePostgresql `yaml:"postgresql"` } // ConfigurationRedis ... type ConfigurationRedis struct { - Type enums.RedisType - Url string + Type enums.RedisType `yaml:"type"` + Url string `yaml:"url"` } // ConfigurationCache ... type ConfigurationCache struct { - Type enums.CacheType + Type enums.CacheType `yaml:"type"` } // ConfigurationWorkQueue ... type ConfigurationWorkQueue struct { - Type enums.WorkQueueType + Type enums.WorkQueueType `yaml:"type"` } // ConfigurationNamespace ... type ConfigurationNamespace struct { - AutoCreate bool - Visibility enums.Visibility + AutoCreate bool `yaml:"autoCreate"` + Visibility enums.Visibility `yaml:"visibility"` } -// Configuration ... -type Configuration struct { - Log ConfigurationLog - Database ConfigurationDatabase - Deploy enums.Deploy - Redis ConfigurationRedis - Cache ConfigurationCache - WorkQueue ConfigurationWorkQueue - Namespace ConfigurationNamespace +// ConfigurationHttpTLS ... +type ConfigurationHttpTLS struct { + Enabled bool `yaml:"enabled"` + Certificate string `yaml:"certificate"` + Key string `yaml:"key"` +} + +// ConfigurationHTTP ... +type ConfigurationHTTP struct { + Endpoint string `yaml:"endpoint"` + InternalEndpoint string `yaml:"internalEndpoint"` + TLS ConfigurationHttpTLS `yaml:"tls"` +} + +// ConfigurationStorageFilesystem ... +type ConfigurationStorageFilesystem struct { + Path string `yaml:"path"` +} + +// ConfigurationStorageS3 ... +type ConfigurationStorageS3 struct { + Ak string `yaml:"ak"` + Sk string `yaml:"sk"` + Endpoint string `yaml:"endpoint"` + Region string `yaml:"region"` + Bucket string `yaml:"bucket"` + ForcePathStyle bool `yaml:"forcePathStyle"` +} + +// ConfigurationStorage ... +type ConfigurationStorage struct { + RootDirectory string `yaml:"rootDirectory"` + Type string `yaml:"type"` + Filesystem ConfigurationStorageFilesystem `yaml:"filesystem"` + S3 ConfigurationStorageS3 `yaml:"s3"` +} + +// ConfigurationProxy ... +type ConfigurationProxy struct { + Enabled string `yaml:"enabled"` + Endpoint string `yaml:"endpoint"` + TlsVerify bool `yaml:"tlsVerify"` + Username string `yaml:"username"` + Password string `yaml:"password"` +} + +// ConfigurationDaemonGc ... 
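+// It carries the retention setting and the cron expression used by the
+// garbage-collection daemon.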
+type ConfigurationDaemonGc struct { + Retention string `yaml:"retention"` + Cron string `yaml:"cron"` +} + +// ConfigurationDaemon ... +type ConfigurationDaemon struct { + Gc ConfigurationDaemonGc `yaml:"gc"` +} + +// ConfigurationAuthInternalUser ... +type ConfigurationAuthInternalUser struct { + Username string `yaml:"username"` + Password string `yaml:"password"` +} + +// ConfigurationAuthAdmin ... +type ConfigurationAuthAdmin struct { + Username string `yaml:"username"` + Password string `yaml:"password"` +} + +// ConfigurationAuthToken ... +type ConfigurationAuthToken struct { + Realm string `yaml:"realm"` + Service string `yaml:"service"` +} + +// ConfigurationAuthJwt ... +type ConfigurationAuthJwt struct { + Ttl string `yaml:"ttl"` + RefreshTtl string `yaml:"refreshTtl"` + PrivateKey string `yaml:"privateKey"` +} + +// ConfigurationAuthOauth2Github ... +type ConfigurationAuthOauth2Github struct { + ClientID string `yaml:"clientId"` + ClientSecret string `yaml:"clientSecret"` +} + +// ConfigurationAuthOauth2 ... +type ConfigurationAuthOauth2 struct { + Github ConfigurationAuthOauth2Github `yaml:"github"` +} + +// ConfigurationAuth ... +type ConfigurationAuth struct { + InternalUser ConfigurationAuthInternalUser `yaml:"internalUser"` + Admin ConfigurationAuthAdmin `yaml:"admin"` + Token ConfigurationAuthToken `yaml:"token"` + Oauth2 ConfigurationAuthOauth2 `yaml:"oauth2"` } diff --git a/pkg/consts/topics.go b/pkg/consts/topics.go index 0cd4eb26..9e54ced2 100644 --- a/pkg/consts/topics.go +++ b/pkg/consts/topics.go @@ -25,4 +25,6 @@ const ( TopicGcRepository = "gc_repository" // TopicWebhook is the topic for the webhook TopicWebhook = "webhook" + // TopicBuilder is the topic for the builder + TopicBuilder = "builder" ) diff --git a/pkg/daemon/builder/builder.go b/pkg/daemon/builder/builder.go new file mode 100644 index 00000000..0bf4cf1e --- /dev/null +++ b/pkg/daemon/builder/builder.go @@ -0,0 +1,112 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
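+
+// The builder daemon consumes messages from the builder topic: a stop action
+// stops the running container through the configured builder driver, while
+// start and restart create a pending BuilderRunner record for the repository's
+// builder and hand the assembled build configuration to the driver.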
+ +package builder + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/hibiken/asynq" + "github.com/rs/zerolog/log" + + builderdriver "github.com/go-sigma/sigma/pkg/builder" + "github.com/go-sigma/sigma/pkg/daemon" + "github.com/go-sigma/sigma/pkg/dal/dao" + "github.com/go-sigma/sigma/pkg/dal/models" + "github.com/go-sigma/sigma/pkg/types" + "github.com/go-sigma/sigma/pkg/types/enums" + "github.com/go-sigma/sigma/pkg/utils" +) + +func init() { + utils.PanicIf(daemon.RegisterTask(enums.DaemonBuilder, builderRunner)) +} + +func builderRunner(ctx context.Context, task *asynq.Task) error { + var payload types.DaemonBuilderPayload + err := json.Unmarshal(task.Payload(), &payload) + if err != nil { + return fmt.Errorf("Unmarshal payload failed: %v", err) + } + b := builder{ + builderServiceFactory: dao.NewBuilderServiceFactory(), + } + return b.runner(ctx, payload) +} + +type builder struct { + builderServiceFactory dao.BuilderServiceFactory +} + +func (b builder) runner(ctx context.Context, payload types.DaemonBuilderPayload) error { + if payload.Action == enums.DaemonBuilderActionStop { + return builderdriver.Driver.Stop(ctx, payload.ID, payload.RunnerID) + } + builderService := b.builderServiceFactory.New() + builderObj, err := builderService.GetByRepositoryID(ctx, payload.RepositoryID) + if err != nil { + log.Error().Err(err).Int64("id", payload.RepositoryID).Msg("Get builder record failed") + return fmt.Errorf("Get builder record failed") + } + + runnerObj := &models.BuilderRunner{ + BuilderID: payload.ID, + Status: enums.BuildStatusPending, + } + err = builderService.CreateRunner(ctx, runnerObj) + if err != nil { + log.Error().Err(err).Msg("Create builder runner record failed") + return fmt.Errorf("Create builder runner record failed: %v", err) + } + + buildConfig := builderdriver.BuilderConfig{ + Builder: types.Builder{ + ID: payload.ID, + RunnerID: runnerObj.ID, + + ScmCredentialType: builderObj.ScmCredentialType, + ScmProvider: enums.ScmProviderGithub, + ScmSshKey: builderObj.ScmSshKey, + ScmToken: builderObj.ScmToken, + ScmUsername: builderObj.ScmUsername, + ScmPassword: builderObj.ScmPassword, + ScmRepository: builderObj.ScmRepository, + ScmBranch: builderObj.ScmBranch, + ScmDepth: builderObj.ScmDepth, + ScmSubmodule: builderObj.ScmSubmodule, + + OciRegistryDomain: []string{"192.168.31.114:3000"}, + OciRegistryUsername: []string{"ximager"}, + OciRegistryPassword: []string{"ximager"}, + OciName: "192.168.31.114:3000/library/test:dev", + + BuildkitInsecureRegistries: []string{"192.168.31.114:3000@http"}, + }, + } + if payload.Action == enums.DaemonBuilderActionStart { // nolint: gocritic + err = builderdriver.Driver.Start(ctx, buildConfig) + } else if payload.Action == enums.DaemonBuilderActionRestart { + err = builderdriver.Driver.Start(ctx, buildConfig) + } else { + log.Error().Err(err).Str("action", payload.Action.String()).Msg("Daemon builder action not found") + return fmt.Errorf("Daemon builder action not found") + } + if err != nil { + log.Error().Err(err).Msg("Start or restart builder failed") + return fmt.Errorf("Start or restart builder failed: %v", err) + } + return nil +} diff --git a/pkg/daemon/daemon.go b/pkg/daemon/daemon.go index 69d31888..b56ac4bb 100644 --- a/pkg/daemon/daemon.go +++ b/pkg/daemon/daemon.go @@ -38,6 +38,7 @@ var topics = map[enums.Daemon]string{ enums.DaemonGc: consts.TopicGc, enums.DaemonGcRepository: consts.TopicGcRepository, enums.DaemonWebhook: consts.TopicWebhook, + enums.DaemonBuilder: consts.TopicBuilder, } var ( diff --git 
a/pkg/daemon/gc/gc.go b/pkg/daemon/gc/gc.go index f1f90900..e528eb46 100644 --- a/pkg/daemon/gc/gc.go +++ b/pkg/daemon/gc/gc.go @@ -16,9 +16,9 @@ package gc import ( "context" + "encoding/json" "fmt" - "github.com/bytedance/sonic" "github.com/hibiken/asynq" "github.com/rs/zerolog/log" @@ -47,7 +47,7 @@ type gc struct { func runner(ctx context.Context, task *asynq.Task) error { var payload types.DaemonGcPayload - err := sonic.Unmarshal(task.Payload(), &payload) + err := json.Unmarshal(task.Payload(), &payload) if err != nil { return err } diff --git a/pkg/dal/cmd/gen.go b/pkg/dal/cmd/gen.go index 4a452883..cc884c85 100644 --- a/pkg/dal/cmd/gen.go +++ b/pkg/dal/cmd/gen.go @@ -44,7 +44,7 @@ func main() { models.Webhook{}, models.WebhookLog{}, models.Builder{}, - models.BuilderLog{}, + models.BuilderRunner{}, ) g.Execute() diff --git a/pkg/dal/dao/builder.go b/pkg/dal/dao/builder.go index 9833fb54..5d775c73 100644 --- a/pkg/dal/dao/builder.go +++ b/pkg/dal/dao/builder.go @@ -17,6 +17,8 @@ package dao import ( "context" + "gorm.io/gorm" + "github.com/go-sigma/sigma/pkg/dal/models" "github.com/go-sigma/sigma/pkg/dal/query" ) @@ -25,10 +27,14 @@ import ( type BuilderService interface { // Create creates a new builder record in the database Create(ctx context.Context, audit *models.Builder) error - // CreateLog creates a new BuilderLog record in the database - CreateLog(ctx context.Context, log *models.BuilderLog) error - // GetLog get log from object storage or database - GetLog(ctx context.Context, id int64) (*models.BuilderLog, error) + // Get get builder by repository id + GetByRepositoryID(ctx context.Context, repositoryID int64) (*models.Builder, error) + // CreateRunner creates a new builder runner record in the database + CreateRunner(ctx context.Context, log *models.BuilderRunner) error + // GetRunner get runner from object storage or database + GetRunner(ctx context.Context, id int64) (*models.BuilderRunner, error) + // UpdateRunner update builder runner + UpdateRunner(ctx context.Context, builderID, runnerID int64, updates map[string]interface{}) error } type builderService struct { @@ -59,15 +65,32 @@ func (f *builderServiceFactory) New(txs ...*query.Query) BuilderService { // Create creates a new builder record in the database func (s builderService) Create(ctx context.Context, builder *models.Builder) error { - return s.tx.WithContext(ctx).Builder.Create(builder) + return s.tx.Builder.WithContext(ctx).Create(builder) +} + +// Get get builder by repository id +func (s builderService) GetByRepositoryID(ctx context.Context, repositoryID int64) (*models.Builder, error) { + return s.tx.Builder.WithContext(ctx).Where(s.tx.Builder.RepositoryID.Eq(repositoryID)).First() } -// CreateLog creates a new BuilderLog record in the database -func (s builderService) CreateLog(ctx context.Context, log *models.BuilderLog) error { - return s.tx.WithContext(ctx).BuilderLog.Create(log) +// CreateRunner creates a new builder runner record in the database +func (s builderService) CreateRunner(ctx context.Context, log *models.BuilderRunner) error { + return s.tx.BuilderRunner.WithContext(ctx).Create(log) } -// GetLog get log from object storage or database -func (s builderService) GetLog(ctx context.Context, id int64) (*models.BuilderLog, error) { - return s.tx.WithContext(ctx).BuilderLog.Where(s.tx.BuilderLog.BuilderID.Eq(id)).First() +// GetRunner get runner from object storage or database +func (s builderService) GetRunner(ctx context.Context, id int64) (*models.BuilderRunner, error) { + return 
s.tx.BuilderRunner.WithContext(ctx).Where(s.tx.BuilderRunner.BuilderID.Eq(id)).First() +} + +// UpdateRunner update builder runner +func (s builderService) UpdateRunner(ctx context.Context, builderID, runnerID int64, updates map[string]interface{}) error { + matched, err := s.tx.BuilderRunner.WithContext(ctx).Where(s.tx.BuilderRunner.BuilderID.Eq(builderID), s.tx.BuilderRunner.ID.Eq(runnerID)).Updates(updates) + if err != nil { + return err + } + if matched.RowsAffected == 0 { + return gorm.ErrRecordNotFound + } + return nil } diff --git a/pkg/dal/migrations/mysql/0001_initialize.up.sql b/pkg/dal/migrations/mysql/0001_initialize.up.sql index f48246dd..fc03a03a 100644 --- a/pkg/dal/migrations/mysql/0001_initialize.up.sql +++ b/pkg/dal/migrations/mysql/0001_initialize.up.sql @@ -5,8 +5,8 @@ CREATE TABLE IF NOT EXISTS `users` ( `username` varchar(64) NOT NULL UNIQUE, `password` varchar(256), `email` varchar(256), - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, CONSTRAINT `users_unique_with_username` UNIQUE (`username`, `deleted_at`) ); @@ -15,8 +15,8 @@ CREATE TABLE IF NOT EXISTS `user_recover_codes` ( `id` bigint AUTO_INCREMENT PRIMARY KEY, `user_id` bigint NOT NULL, `code` varchar(256) NOT NULL, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`user_id`) REFERENCES `users` (`id`), CONSTRAINT `user_recover_codes_unique_with_use_id` UNIQUE (`user_id`, `deleted_at`) @@ -48,8 +48,8 @@ CREATE TABLE IF NOT EXISTS `audits` ( `resource` varchar(256) NOT NULL, `before_raw` BLOB, `req_raw` BLOB, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`user_id`) REFERENCES `users` (`id`), FOREIGN KEY (`namespace_id`) REFERENCES `namespaces` (`id`) @@ -87,8 +87,8 @@ CREATE TABLE IF NOT EXISTS `artifacts` ( `pushed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `last_pull` timestamp, `pull_times` bigint NOT NULL DEFAULT 0, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`repository_id`) REFERENCES `repositories` (`id`), CONSTRAINT `artifacts_unique_with_repo` UNIQUE (`repository_id`, `digest`, `deleted_at`) @@ -103,8 +103,8 @@ CREATE TABLE IF NOT EXISTS `artifact_sboms` ( `stdout` MEDIUMBLOB, `stderr` MEDIUMBLOB, `message` varchar(256), - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`artifact_id`) REFERENCES `artifacts` (`id`), CONSTRAINT `artifact_sbom_unique_with_artifact` UNIQUE (`artifact_id`, `deleted_at`) @@ -120,8 +120,8 @@ CREATE TABLE IF NOT EXISTS `artifact_vulnerabilities` ( `stdout` MEDIUMBLOB, `stderr` MEDIUMBLOB, `message` varchar(256), - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL 
DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`artifact_id`) REFERENCES `artifacts` (`id`), CONSTRAINT `artifact_vulnerability_unique_with_artifact` UNIQUE (`artifact_id`, `deleted_at`) @@ -135,8 +135,8 @@ CREATE TABLE IF NOT EXISTS `tags` ( `pushed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `last_pull` timestamp, `pull_times` bigint NOT NULL DEFAULT 0, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`repository_id`) REFERENCES `repositories` (`id`), FOREIGN KEY (`artifact_id`) REFERENCES `artifacts` (`id`), @@ -151,8 +151,8 @@ CREATE TABLE IF NOT EXISTS `blobs` ( `pushed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `last_pull` timestamp, `pull_times` bigint NOT NULL DEFAULT 0, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, CONSTRAINT `blobs_unique_with_digest` UNIQUE (`digest`, `deleted_at`) ); @@ -165,8 +165,8 @@ CREATE TABLE IF NOT EXISTS `blob_uploads` ( `repository` varchar(256) NOT NULL, `file_id` varchar(256) NOT NULL, `size` bigint NOT NULL DEFAULT 0, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, CONSTRAINT `blob_uploads_unique_with_upload_id_etag` UNIQUE (`upload_id`, `etag`, `deleted_at`) ); @@ -193,10 +193,10 @@ CREATE TABLE IF NOT EXISTS `daemon_logs` ( `type` ENUM ('Gc', 'Vulnerability', 'Sbom') NOT NULL, `action` ENUM ('create', 'update', 'delete', 'pull', 'push') NOT NULL, `resource` varchar(256) NOT NULL, - `status` ENUM ('Success', 'Failed') NOT NULL, + `status` ENUM ('Success', 'Failed', 'Pending', 'Doing') NOT NULL, `message` BLOB, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0 ); @@ -250,8 +250,8 @@ CREATE TABLE IF NOT EXISTS `webhooks` ( `event_tag` tinyint NOT NULL DEFAULT 1, `event_pull_push` tinyint NOT NULL DEFAULT 1, `event_member` tinyint NOT NULL DEFAULT 1, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0 ); @@ -264,25 +264,39 @@ CREATE TABLE IF NOT EXISTS `webhook_logs` ( `req_body` BLOB NOT NULL, `resp_header` BLOB NOT NULL, `resp_body` BLOB, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`webhook_id`) REFERENCES `webhooks` (`id`) ); CREATE TABLE IF NOT EXISTS `builders` ( `id` bigint AUTO_INCREMENT PRIMARY KEY, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, - `deleted_at` bigint NOT NULL DEFAULT 0 + `repository_id` bigint NOT NULL, + `active` tinyint NOT NULL DEFAULT 1, + `scm_credential_type` varchar(16) NOT NULL, + 
`scm_ssh_key` BLOB, + `scm_token` varchar(256), + `scm_username` varchar(30), + `scm_password` varchar(30), + `scm_repository` varchar(256) NOT NULL, + `scm_branch` varchar(30) NOT NULL DEFAULT 'main', + `scm_depth` MEDIUMINT NOT NULL DEFAULT 0, + `scm_submodule` tinyint NOT NULL DEFAULT 1, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `deleted_at` bigint NOT NULL DEFAULT 0, + FOREIGN KEY (`repository_id`) REFERENCES `repositories` (`id`), + CONSTRAINT `builders_unique_with_repository` UNIQUE (`repository_id`, `deleted_at`) ); -CREATE TABLE IF NOT EXISTS `builder_logs` ( +CREATE TABLE IF NOT EXISTS `builder_runners` ( `id` bigint AUTO_INCREMENT PRIMARY KEY, `builder_id` bigint NOT NULL, `log` LONGBLOB, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `status` ENUM ('Success', 'Failed', 'Pending', 'Scheduling', 'Building') NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`builder_id`) REFERENCES `builders` (`id`) ); diff --git a/pkg/dal/migrations/postgresql/0001_initialize.up.sql b/pkg/dal/migrations/postgresql/0001_initialize.up.sql index 7b63ab3b..9da2d721 100644 --- a/pkg/dal/migrations/postgresql/0001_initialize.up.sql +++ b/pkg/dal/migrations/postgresql/0001_initialize.up.sql @@ -10,8 +10,8 @@ CREATE TABLE IF NOT EXISTS "users" ( "username" varchar(64) NOT NULL UNIQUE, "password" varchar(256) NOT NULL, "email" varchar(256), - "created_at" timestamp NOT NULL, - "updated_at" timestamp NOT NULL, + "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "deleted_at" bigint NOT NULL DEFAULT 0, CONSTRAINT "users_unique_with_username" UNIQUE ("username", "deleted_at") ); @@ -20,8 +20,8 @@ CREATE TABLE IF NOT EXISTS "user_recover_codes" ( "id" bigserial PRIMARY KEY, "user_id" bigint NOT NULL, "code" varchar(256) NOT NULL, - "created_at" timestamp NOT NULL, - "updated_at" timestamp NOT NULL, + "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "deleted_at" integer NOT NULL DEFAULT 0, FOREIGN KEY ("user_id") REFERENCES "users" ("id"), CONSTRAINT "user_recover_codes_unique_with_user_id" UNIQUE ("user_id", "deleted_at") @@ -72,8 +72,8 @@ CREATE TABLE IF NOT EXISTS "audits" ( "resource" varchar(256) NOT NULL, "before_raw" bytea, "req_raw" bytea, - "created_at" timestamp NOT NULL, - "updated_at" timestamp NOT NULL, + "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "deleted_at" bigint NOT NULL DEFAULT 0, FOREIGN KEY ("user_id") REFERENCES "users" ("id"), FOREIGN KEY ("namespace_id") REFERENCES "namespaces" ("id") @@ -121,8 +121,8 @@ CREATE TABLE IF NOT EXISTS "artifacts" ( "pushed_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "last_pull" timestamp, "pull_times" bigint NOT NULL DEFAULT 0, - "created_at" timestamp NOT NULL, - "updated_at" timestamp NOT NULL, + "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "deleted_at" bigint NOT NULL DEFAULT 0, FOREIGN KEY ("repository_id") REFERENCES "repositories" ("id"), CONSTRAINT "artifacts_unique_with_repo" UNIQUE ("repository_id", "digest", "deleted_at") @@ -144,8 +144,8 @@ CREATE TABLE IF NOT EXISTS "artifact_sboms" ( "stdout" bytea, "stderr" bytea, 
"message" varchar(256), - "created_at" timestamp NOT NULL, - "updated_at" timestamp NOT NULL, + "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "deleted_at" bigint NOT NULL DEFAULT 0, FOREIGN KEY ("artifact_id") REFERENCES "artifacts" ("id"), CONSTRAINT "artifact_sbom_unique_with_artifact" UNIQUE ("artifact_id", "deleted_at") @@ -161,8 +161,8 @@ CREATE TABLE IF NOT EXISTS "artifact_vulnerabilities" ( "stdout" bytea, "stderr" bytea, "message" varchar(256), - "created_at" timestamp NOT NULL, - "updated_at" timestamp NOT NULL, + "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "deleted_at" bigint NOT NULL DEFAULT 0, FOREIGN KEY ("artifact_id") REFERENCES "artifacts" ("id"), CONSTRAINT "artifact_vulnerability_unique_with_artifact" UNIQUE ("artifact_id", "deleted_at") @@ -176,8 +176,8 @@ CREATE TABLE IF NOT EXISTS "tags" ( "pushed_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "last_pull" timestamp, "pull_times" bigint NOT NULL DEFAULT 0, - "created_at" timestamp NOT NULL, - "updated_at" timestamp NOT NULL, + "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "deleted_at" bigint NOT NULL DEFAULT 0, FOREIGN KEY ("repository_id") REFERENCES "repositories" ("id"), FOREIGN KEY ("artifact_id") REFERENCES "artifacts" ("id"), @@ -319,15 +319,37 @@ CREATE TABLE IF NOT EXISTS "webhook_logs" ( CREATE TABLE IF NOT EXISTS "builders" ( "id" bigserial PRIMARY KEY, + "repository_id" bigint NOT NULL, + "active" smallint NOT NULL DEFAULT 1, + "scm_credential_type" varchar(16) NOT NULL, + "scm_ssh_key" BLOB, + "scm_token" varchar(256), + "scm_username" varchar(30), + "scm_password" varchar(30), + "scm_repository" varchar(256) NOT NULL, + "scm_branch" varchar(30) NOT NULL DEFAULT 'main', + "scm_depth" MEDIUMINT NOT NULL DEFAULT 0, + "scm_submodule" smallint NOT NULL DEFAULT 1, "created_at" timestamp NOT NULL, "updated_at" timestamp NOT NULL, - "deleted_at" bigint NOT NULL DEFAULT 0 + "deleted_at" bigint NOT NULL DEFAULT 0, + FOREIGN KEY ("repository_id") REFERENCES "repositories" ("id"), + CONSTRAINT "builders_unique_with_repository" UNIQUE ("repository_id", "deleted_at") +); + +CREATE TYPE build_status AS ENUM ( + 'Pending', + 'Doing', + 'Success', + 'Failed', + 'Scheduling' ); -CREATE TABLE IF NOT EXISTS "builder_logs" ( +CREATE TABLE IF NOT EXISTS "builder_runners" ( "id" bigserial PRIMARY KEY, "builder_id" bigint NOT NULL, "log" bytea, + "status" build_status NOT NULL DEFAULT 'Pending', "created_at" timestamp NOT NULL, "updated_at" timestamp NOT NULL, "deleted_at" bigint NOT NULL DEFAULT 0, diff --git a/pkg/dal/migrations/sqlite3/0001_initialize.up.sql b/pkg/dal/migrations/sqlite3/0001_initialize.up.sql index d2a6df27..b70a9cb5 100644 --- a/pkg/dal/migrations/sqlite3/0001_initialize.up.sql +++ b/pkg/dal/migrations/sqlite3/0001_initialize.up.sql @@ -5,8 +5,8 @@ CREATE TABLE IF NOT EXISTS `users` ( `username` varchar(64) NOT NULL UNIQUE, `password` varchar(256), `email` varchar(256), - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, CONSTRAINT `users_unique_with_username` UNIQUE (`username`, `deleted_at`) ); @@ -15,8 +15,8 @@ CREATE TABLE IF NOT EXISTS `user_recover_codes` ( `id` integer PRIMARY KEY AUTOINCREMENT, 
`user_id` integer NOT NULL, `code` varchar(256) NOT NULL, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` integer NOT NULL DEFAULT 0, FOREIGN KEY (`user_id`) REFERENCES `users` (`id`), CONSTRAINT `user_recover_codes_unique_with_use_id` UNIQUE (`user_id`, `deleted_at`) @@ -48,8 +48,8 @@ CREATE TABLE IF NOT EXISTS `audits` ( `resource` varchar(256) NOT NULL, `before_raw` BLOB, `req_raw` BLOB, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`user_id`) REFERENCES `users` (`id`), FOREIGN KEY (`namespace_id`) REFERENCES `namespaces` (`id`) @@ -87,8 +87,8 @@ CREATE TABLE IF NOT EXISTS `artifacts` ( `pushed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `last_pull` timestamp, `pull_times` bigint NOT NULL DEFAULT 0, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`repository_id`) REFERENCES `repositories` (`id`), CONSTRAINT `artifacts_unique_with_repo` UNIQUE (`repository_id`, `digest`, `deleted_at`) @@ -103,8 +103,8 @@ CREATE TABLE IF NOT EXISTS `artifact_sboms` ( `stdout` BLOB, `stderr` BLOB, `message` varchar(256), - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`artifact_id`) REFERENCES `artifacts` (`id`), CONSTRAINT `artifact_sbom_unique_with_artifact` UNIQUE (`artifact_id`, `deleted_at`) @@ -120,8 +120,8 @@ CREATE TABLE IF NOT EXISTS `artifact_vulnerabilities` ( `stdout` BLOB, `stderr` BLOB, `message` varchar(256), - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`artifact_id`) REFERENCES `artifacts` (`id`), CONSTRAINT `artifact_vulnerability_unique_with_artifact` UNIQUE (`artifact_id`, `deleted_at`) @@ -135,8 +135,8 @@ CREATE TABLE IF NOT EXISTS `tags` ( `pushed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `last_pull` timestamp, `pull_times` integer NOT NULL DEFAULT 0, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`repository_id`) REFERENCES `repositories` (`id`), FOREIGN KEY (`artifact_id`) REFERENCES `artifacts` (`id`), @@ -151,8 +151,8 @@ CREATE TABLE IF NOT EXISTS `blobs` ( `pushed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `last_pull` timestamp, `pull_times` integer NOT NULL DEFAULT 0, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, CONSTRAINT `blobs_unique_with_digest` UNIQUE (`digest`, `deleted_at`) ); @@ -165,8 +165,8 @@ CREATE TABLE IF NOT EXISTS `blob_uploads` ( 
`repository` varchar(256) NOT NULL, `file_id` varchar(256) NOT NULL, `size` integer NOT NULL DEFAULT 0, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, CONSTRAINT `blob_uploads_unique_with_upload_id_etag` UNIQUE (`upload_id`, `etag`, `deleted_at`) ); @@ -195,9 +195,9 @@ CREATE TABLE IF NOT EXISTS `daemon_logs` ( `resource` varchar(256) NOT NULL, `status` text CHECK (`status` IN ('Success', 'Failed')) NOT NULL, `message` BLOB, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, - `deleted_at` bigint NOT NULL DEFAULT 0 + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `deleted_at` integer NOT NULL DEFAULT 0 ); CREATE TABLE `casbin_rules` ( @@ -250,9 +250,9 @@ CREATE TABLE IF NOT EXISTS `webhooks` ( `event_tag` integer NOT NULL DEFAULT 1, `event_pull_push` integer NOT NULL DEFAULT 1, `event_member` integer NOT NULL DEFAULT 1, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, - `deleted_at` bigint NOT NULL DEFAULT 0 + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `deleted_at` integer NOT NULL DEFAULT 0 ); CREATE TABLE IF NOT EXISTS `webhook_logs` ( @@ -264,25 +264,39 @@ CREATE TABLE IF NOT EXISTS `webhook_logs` ( `req_body` BLOB NOT NULL, `resp_header` BLOB NOT NULL, `resp_body` BLOB, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, - `deleted_at` bigint NOT NULL DEFAULT 0, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `deleted_at` integer NOT NULL DEFAULT 0, FOREIGN KEY (`webhook_id`) REFERENCES `webhooks` (`id`) ); CREATE TABLE IF NOT EXISTS `builders` ( `id` integer PRIMARY KEY AUTOINCREMENT, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, - `deleted_at` bigint NOT NULL DEFAULT 0 + `repository_id` integer NOT NULL, + `active` integer NOT NULL DEFAULT 1, + `scm_credential_type` varchar(16) NOT NULL, + `scm_ssh_key` BLOB, + `scm_token` varchar(256), + `scm_username` varchar(30), + `scm_password` varchar(30), + `scm_repository` varchar(256) NOT NULL, + `scm_branch` varchar(30) NOT NULL DEFAULT 'main', + `scm_depth` MEDIUMINT NOT NULL DEFAULT 0, + `scm_submodule` integer NOT NULL DEFAULT 1, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `deleted_at` bigint NOT NULL DEFAULT 0, + FOREIGN KEY (`repository_id`) REFERENCES `repositories` (`id`), + CONSTRAINT `builders_unique_with_repository` UNIQUE (`repository_id`, `deleted_at`) ); -CREATE TABLE IF NOT EXISTS `builder_logs` ( +CREATE TABLE IF NOT EXISTS `builder_runners` ( `id` integer PRIMARY KEY AUTOINCREMENT, `builder_id` bigint NOT NULL, `log` BLOB, - `created_at` timestamp NOT NULL, - `updated_at` timestamp NOT NULL, + `status` text CHECK (`status` IN ('Success', 'Failed', 'Pending', 'Scheduling', 'Building')) NOT NULL, + `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `deleted_at` bigint NOT NULL DEFAULT 0, FOREIGN KEY (`builder_id`) REFERENCES `builders` (`id`) ); diff --git a/pkg/dal/models/builder.go b/pkg/dal/models/builder.go index 9ce160e2..8f6d79b3 100644 --- a/pkg/dal/models/builder.go +++ 
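The builder_runners.status column in the migrations above is driven by the Go BuildStatus enum, and the intended write path is the DAO's UpdateRunner added earlier in this patch. A short usage sketch follows; the constant name assumes the generator's BuildStatusXxx naming pattern, and the calling context (where builderID and runnerID come from) is an assumption.

// Illustrative status update via BuilderService.UpdateRunner; builderID and
// runnerID are assumed to be known to the caller.
package example

import (
	"context"
	"errors"
	"fmt"

	"gorm.io/gorm"

	"github.com/go-sigma/sigma/pkg/dal/dao"
	"github.com/go-sigma/sigma/pkg/types/enums"
)

// MarkBuilding moves a runner into the Building state once the driver schedules it.
func MarkBuilding(ctx context.Context, builderID, runnerID int64) error {
	builderService := dao.NewBuilderServiceFactory().New()
	err := builderService.UpdateRunner(ctx, builderID, runnerID, map[string]interface{}{
		"status": enums.BuildStatusBuilding, // stored in the status column defined above
	})
	if errors.Is(err, gorm.ErrRecordNotFound) {
		// UpdateRunner reports this when no runner matches the id pair.
		return fmt.Errorf("runner %d of builder %d not found: %w", runnerID, builderID, err)
	}
	return err
}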
b/pkg/dal/models/builder.go @@ -18,6 +18,8 @@ import ( "time" "gorm.io/plugin/soft_delete" + + "github.com/go-sigma/sigma/pkg/types/enums" ) // Builder represents a builder @@ -26,10 +28,29 @@ type Builder struct { UpdatedAt time.Time DeletedAt soft_delete.DeletedAt `gorm:"softDelete:milli"` ID int64 `gorm:"primaryKey"` + + RepositoryID int64 + Active bool + ScmCredentialType enums.ScmCredentialType + ScmToken string + ScmSshKey string + ScmUsername string + ScmPassword string + ScmRepository string + ScmBranch string + ScmDepth int + ScmSubmodule bool + + BuildkitInsecureRegistries string + BuildkitContext string `gorm:"default:."` + BuildkitDockerfile string `gorm:"default:Dockerfile"` + BuildkitPlatforms string `gorm:"default:linux/amd64"` + + Repository *Repository } -// BuilderLog represents a builder log -type BuilderLog struct { +// BuilderRunner represents a builder runner +type BuilderRunner struct { CreatedAt time.Time UpdatedAt time.Time DeletedAt soft_delete.DeletedAt `gorm:"softDelete:milli"` @@ -37,6 +58,7 @@ type BuilderLog struct { BuilderID int64 Log []byte + Status enums.BuildStatus Builder Builder } diff --git a/pkg/dal/query/builder_logs.gen.go b/pkg/dal/query/builder_logs.gen.go index 927fd882..a7c9077b 100644 --- a/pkg/dal/query/builder_logs.gen.go +++ b/pkg/dal/query/builder_logs.gen.go @@ -23,7 +23,7 @@ func newBuilderLog(db *gorm.DB, opts ...gen.DOOption) builderLog { _builderLog := builderLog{} _builderLog.builderLogDo.UseDB(db, opts...) - _builderLog.builderLogDo.UseModel(&models.BuilderLog{}) + _builderLog.builderLogDo.UseModel(&models.BuilderRunner{}) tableName := _builderLog.builderLogDo.TableName() _builderLog.ALL = field.NewAsterisk(tableName) @@ -33,10 +33,24 @@ func newBuilderLog(db *gorm.DB, opts ...gen.DOOption) builderLog { _builderLog.ID = field.NewInt64(tableName, "id") _builderLog.BuilderID = field.NewInt64(tableName, "builder_id") _builderLog.Log = field.NewBytes(tableName, "log") + _builderLog.Status = field.NewField(tableName, "status") _builderLog.Builder = builderLogBelongsToBuilder{ db: db.Session(&gorm.Session{}), RelationField: field.NewRelation("Builder", "models.Builder"), + Repository: struct { + field.RelationField + Namespace struct { + field.RelationField + } + }{ + RelationField: field.NewRelation("Builder.Repository", "models.Repository"), + Namespace: struct { + field.RelationField + }{ + RelationField: field.NewRelation("Builder.Repository.Namespace", "models.Namespace"), + }, + }, } _builderLog.fillFieldMap() @@ -54,6 +68,7 @@ type builderLog struct { ID field.Int64 BuilderID field.Int64 Log field.Bytes + Status field.Field Builder builderLogBelongsToBuilder fieldMap map[string]field.Expr @@ -77,6 +92,7 @@ func (b *builderLog) updateTableName(table string) *builderLog { b.ID = field.NewInt64(table, "id") b.BuilderID = field.NewInt64(table, "builder_id") b.Log = field.NewBytes(table, "log") + b.Status = field.NewField(table, "status") b.fillFieldMap() @@ -103,13 +119,14 @@ func (b *builderLog) GetFieldByName(fieldName string) (field.OrderExpr, bool) { } func (b *builderLog) fillFieldMap() { - b.fieldMap = make(map[string]field.Expr, 7) + b.fieldMap = make(map[string]field.Expr, 8) b.fieldMap["created_at"] = b.CreatedAt b.fieldMap["updated_at"] = b.UpdatedAt b.fieldMap["deleted_at"] = b.DeletedAt b.fieldMap["id"] = b.ID b.fieldMap["builder_id"] = b.BuilderID b.fieldMap["log"] = b.Log + b.fieldMap["status"] = b.Status } @@ -127,6 +144,13 @@ type builderLogBelongsToBuilder struct { db *gorm.DB field.RelationField + + Repository struct 
{ + field.RelationField + Namespace struct { + field.RelationField + } + } } func (a builderLogBelongsToBuilder) Where(conds ...field.Expr) *builderLogBelongsToBuilder { @@ -152,7 +176,7 @@ func (a builderLogBelongsToBuilder) Session(session *gorm.Session) *builderLogBe return &a } -func (a builderLogBelongsToBuilder) Model(m *models.BuilderLog) *builderLogBelongsToBuilderTx { +func (a builderLogBelongsToBuilder) Model(m *models.BuilderRunner) *builderLogBelongsToBuilderTx { return &builderLogBelongsToBuilderTx{a.db.Model(m).Association(a.Name())} } @@ -288,57 +312,57 @@ func (b builderLogDo) Unscoped() *builderLogDo { return b.withDO(b.DO.Unscoped()) } -func (b builderLogDo) Create(values ...*models.BuilderLog) error { +func (b builderLogDo) Create(values ...*models.BuilderRunner) error { if len(values) == 0 { return nil } return b.DO.Create(values) } -func (b builderLogDo) CreateInBatches(values []*models.BuilderLog, batchSize int) error { +func (b builderLogDo) CreateInBatches(values []*models.BuilderRunner, batchSize int) error { return b.DO.CreateInBatches(values, batchSize) } // Save : !!! underlying implementation is different with GORM // The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) -func (b builderLogDo) Save(values ...*models.BuilderLog) error { +func (b builderLogDo) Save(values ...*models.BuilderRunner) error { if len(values) == 0 { return nil } return b.DO.Save(values) } -func (b builderLogDo) First() (*models.BuilderLog, error) { +func (b builderLogDo) First() (*models.BuilderRunner, error) { if result, err := b.DO.First(); err != nil { return nil, err } else { - return result.(*models.BuilderLog), nil + return result.(*models.BuilderRunner), nil } } -func (b builderLogDo) Take() (*models.BuilderLog, error) { +func (b builderLogDo) Take() (*models.BuilderRunner, error) { if result, err := b.DO.Take(); err != nil { return nil, err } else { - return result.(*models.BuilderLog), nil + return result.(*models.BuilderRunner), nil } } -func (b builderLogDo) Last() (*models.BuilderLog, error) { +func (b builderLogDo) Last() (*models.BuilderRunner, error) { if result, err := b.DO.Last(); err != nil { return nil, err } else { - return result.(*models.BuilderLog), nil + return result.(*models.BuilderRunner), nil } } -func (b builderLogDo) Find() ([]*models.BuilderLog, error) { +func (b builderLogDo) Find() ([]*models.BuilderRunner, error) { result, err := b.DO.Find() - return result.([]*models.BuilderLog), err + return result.([]*models.BuilderRunner), err } -func (b builderLogDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.BuilderLog, err error) { - buf := make([]*models.BuilderLog, 0, batchSize) +func (b builderLogDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.BuilderRunner, err error) { + buf := make([]*models.BuilderRunner, 0, batchSize) err = b.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { defer func() { results = append(results, buf...) 
}() return fc(tx, batch) @@ -346,7 +370,7 @@ func (b builderLogDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) return results, err } -func (b builderLogDo) FindInBatches(result *[]*models.BuilderLog, batchSize int, fc func(tx gen.Dao, batch int) error) error { +func (b builderLogDo) FindInBatches(result *[]*models.BuilderRunner, batchSize int, fc func(tx gen.Dao, batch int) error) error { return b.DO.FindInBatches(result, batchSize, fc) } @@ -372,23 +396,23 @@ func (b builderLogDo) Preload(fields ...field.RelationField) *builderLogDo { return &b } -func (b builderLogDo) FirstOrInit() (*models.BuilderLog, error) { +func (b builderLogDo) FirstOrInit() (*models.BuilderRunner, error) { if result, err := b.DO.FirstOrInit(); err != nil { return nil, err } else { - return result.(*models.BuilderLog), nil + return result.(*models.BuilderRunner), nil } } -func (b builderLogDo) FirstOrCreate() (*models.BuilderLog, error) { +func (b builderLogDo) FirstOrCreate() (*models.BuilderRunner, error) { if result, err := b.DO.FirstOrCreate(); err != nil { return nil, err } else { - return result.(*models.BuilderLog), nil + return result.(*models.BuilderRunner), nil } } -func (b builderLogDo) FindByPage(offset int, limit int) (result []*models.BuilderLog, count int64, err error) { +func (b builderLogDo) FindByPage(offset int, limit int) (result []*models.BuilderRunner, count int64, err error) { result, err = b.Offset(offset).Limit(limit).Find() if err != nil { return @@ -417,7 +441,7 @@ func (b builderLogDo) Scan(result interface{}) (err error) { return b.DO.Scan(result) } -func (b builderLogDo) Delete(models ...*models.BuilderLog) (result gen.ResultInfo, err error) { +func (b builderLogDo) Delete(models ...*models.BuilderRunner) (result gen.ResultInfo, err error) { return b.DO.Delete(models) } diff --git a/pkg/dal/query/builder_runners.gen.go b/pkg/dal/query/builder_runners.gen.go new file mode 100644 index 00000000..52993c22 --- /dev/null +++ b/pkg/dal/query/builder_runners.gen.go @@ -0,0 +1,453 @@ +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. +// Code generated by gorm.io/gen. DO NOT EDIT. + +package query + +import ( + "context" + + "gorm.io/gorm" + "gorm.io/gorm/clause" + "gorm.io/gorm/schema" + + "gorm.io/gen" + "gorm.io/gen/field" + + "gorm.io/plugin/dbresolver" + + "github.com/go-sigma/sigma/pkg/dal/models" +) + +func newBuilderRunner(db *gorm.DB, opts ...gen.DOOption) builderRunner { + _builderRunner := builderRunner{} + + _builderRunner.builderRunnerDo.UseDB(db, opts...) 
+ _builderRunner.builderRunnerDo.UseModel(&models.BuilderRunner{}) + + tableName := _builderRunner.builderRunnerDo.TableName() + _builderRunner.ALL = field.NewAsterisk(tableName) + _builderRunner.CreatedAt = field.NewTime(tableName, "created_at") + _builderRunner.UpdatedAt = field.NewTime(tableName, "updated_at") + _builderRunner.DeletedAt = field.NewUint(tableName, "deleted_at") + _builderRunner.ID = field.NewInt64(tableName, "id") + _builderRunner.BuilderID = field.NewInt64(tableName, "builder_id") + _builderRunner.Log = field.NewBytes(tableName, "log") + _builderRunner.Status = field.NewField(tableName, "status") + _builderRunner.Builder = builderRunnerBelongsToBuilder{ + db: db.Session(&gorm.Session{}), + + RelationField: field.NewRelation("Builder", "models.Builder"), + Repository: struct { + field.RelationField + Namespace struct { + field.RelationField + } + }{ + RelationField: field.NewRelation("Builder.Repository", "models.Repository"), + Namespace: struct { + field.RelationField + }{ + RelationField: field.NewRelation("Builder.Repository.Namespace", "models.Namespace"), + }, + }, + } + + _builderRunner.fillFieldMap() + + return _builderRunner +} + +type builderRunner struct { + builderRunnerDo builderRunnerDo + + ALL field.Asterisk + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Uint + ID field.Int64 + BuilderID field.Int64 + Log field.Bytes + Status field.Field + Builder builderRunnerBelongsToBuilder + + fieldMap map[string]field.Expr +} + +func (b builderRunner) Table(newTableName string) *builderRunner { + b.builderRunnerDo.UseTable(newTableName) + return b.updateTableName(newTableName) +} + +func (b builderRunner) As(alias string) *builderRunner { + b.builderRunnerDo.DO = *(b.builderRunnerDo.As(alias).(*gen.DO)) + return b.updateTableName(alias) +} + +func (b *builderRunner) updateTableName(table string) *builderRunner { + b.ALL = field.NewAsterisk(table) + b.CreatedAt = field.NewTime(table, "created_at") + b.UpdatedAt = field.NewTime(table, "updated_at") + b.DeletedAt = field.NewUint(table, "deleted_at") + b.ID = field.NewInt64(table, "id") + b.BuilderID = field.NewInt64(table, "builder_id") + b.Log = field.NewBytes(table, "log") + b.Status = field.NewField(table, "status") + + b.fillFieldMap() + + return b +} + +func (b *builderRunner) WithContext(ctx context.Context) *builderRunnerDo { + return b.builderRunnerDo.WithContext(ctx) +} + +func (b builderRunner) TableName() string { return b.builderRunnerDo.TableName() } + +func (b builderRunner) Alias() string { return b.builderRunnerDo.Alias() } + +func (b builderRunner) Columns(cols ...field.Expr) gen.Columns { + return b.builderRunnerDo.Columns(cols...) 
+} + +func (b *builderRunner) GetFieldByName(fieldName string) (field.OrderExpr, bool) { + _f, ok := b.fieldMap[fieldName] + if !ok || _f == nil { + return nil, false + } + _oe, ok := _f.(field.OrderExpr) + return _oe, ok +} + +func (b *builderRunner) fillFieldMap() { + b.fieldMap = make(map[string]field.Expr, 8) + b.fieldMap["created_at"] = b.CreatedAt + b.fieldMap["updated_at"] = b.UpdatedAt + b.fieldMap["deleted_at"] = b.DeletedAt + b.fieldMap["id"] = b.ID + b.fieldMap["builder_id"] = b.BuilderID + b.fieldMap["log"] = b.Log + b.fieldMap["status"] = b.Status + +} + +func (b builderRunner) clone(db *gorm.DB) builderRunner { + b.builderRunnerDo.ReplaceConnPool(db.Statement.ConnPool) + return b +} + +func (b builderRunner) replaceDB(db *gorm.DB) builderRunner { + b.builderRunnerDo.ReplaceDB(db) + return b +} + +type builderRunnerBelongsToBuilder struct { + db *gorm.DB + + field.RelationField + + Repository struct { + field.RelationField + Namespace struct { + field.RelationField + } + } +} + +func (a builderRunnerBelongsToBuilder) Where(conds ...field.Expr) *builderRunnerBelongsToBuilder { + if len(conds) == 0 { + return &a + } + + exprs := make([]clause.Expression, 0, len(conds)) + for _, cond := range conds { + exprs = append(exprs, cond.BeCond().(clause.Expression)) + } + a.db = a.db.Clauses(clause.Where{Exprs: exprs}) + return &a +} + +func (a builderRunnerBelongsToBuilder) WithContext(ctx context.Context) *builderRunnerBelongsToBuilder { + a.db = a.db.WithContext(ctx) + return &a +} + +func (a builderRunnerBelongsToBuilder) Session(session *gorm.Session) *builderRunnerBelongsToBuilder { + a.db = a.db.Session(session) + return &a +} + +func (a builderRunnerBelongsToBuilder) Model(m *models.BuilderRunner) *builderRunnerBelongsToBuilderTx { + return &builderRunnerBelongsToBuilderTx{a.db.Model(m).Association(a.Name())} +} + +type builderRunnerBelongsToBuilderTx struct{ tx *gorm.Association } + +func (a builderRunnerBelongsToBuilderTx) Find() (result *models.Builder, err error) { + return result, a.tx.Find(&result) +} + +func (a builderRunnerBelongsToBuilderTx) Append(values ...*models.Builder) (err error) { + targetValues := make([]interface{}, len(values)) + for i, v := range values { + targetValues[i] = v + } + return a.tx.Append(targetValues...) +} + +func (a builderRunnerBelongsToBuilderTx) Replace(values ...*models.Builder) (err error) { + targetValues := make([]interface{}, len(values)) + for i, v := range values { + targetValues[i] = v + } + return a.tx.Replace(targetValues...) +} + +func (a builderRunnerBelongsToBuilderTx) Delete(values ...*models.Builder) (err error) { + targetValues := make([]interface{}, len(values)) + for i, v := range values { + targetValues[i] = v + } + return a.tx.Delete(targetValues...) 
+} + +func (a builderRunnerBelongsToBuilderTx) Clear() error { + return a.tx.Clear() +} + +func (a builderRunnerBelongsToBuilderTx) Count() int64 { + return a.tx.Count() +} + +type builderRunnerDo struct{ gen.DO } + +func (b builderRunnerDo) Debug() *builderRunnerDo { + return b.withDO(b.DO.Debug()) +} + +func (b builderRunnerDo) WithContext(ctx context.Context) *builderRunnerDo { + return b.withDO(b.DO.WithContext(ctx)) +} + +func (b builderRunnerDo) ReadDB() *builderRunnerDo { + return b.Clauses(dbresolver.Read) +} + +func (b builderRunnerDo) WriteDB() *builderRunnerDo { + return b.Clauses(dbresolver.Write) +} + +func (b builderRunnerDo) Session(config *gorm.Session) *builderRunnerDo { + return b.withDO(b.DO.Session(config)) +} + +func (b builderRunnerDo) Clauses(conds ...clause.Expression) *builderRunnerDo { + return b.withDO(b.DO.Clauses(conds...)) +} + +func (b builderRunnerDo) Returning(value interface{}, columns ...string) *builderRunnerDo { + return b.withDO(b.DO.Returning(value, columns...)) +} + +func (b builderRunnerDo) Not(conds ...gen.Condition) *builderRunnerDo { + return b.withDO(b.DO.Not(conds...)) +} + +func (b builderRunnerDo) Or(conds ...gen.Condition) *builderRunnerDo { + return b.withDO(b.DO.Or(conds...)) +} + +func (b builderRunnerDo) Select(conds ...field.Expr) *builderRunnerDo { + return b.withDO(b.DO.Select(conds...)) +} + +func (b builderRunnerDo) Where(conds ...gen.Condition) *builderRunnerDo { + return b.withDO(b.DO.Where(conds...)) +} + +func (b builderRunnerDo) Order(conds ...field.Expr) *builderRunnerDo { + return b.withDO(b.DO.Order(conds...)) +} + +func (b builderRunnerDo) Distinct(cols ...field.Expr) *builderRunnerDo { + return b.withDO(b.DO.Distinct(cols...)) +} + +func (b builderRunnerDo) Omit(cols ...field.Expr) *builderRunnerDo { + return b.withDO(b.DO.Omit(cols...)) +} + +func (b builderRunnerDo) Join(table schema.Tabler, on ...field.Expr) *builderRunnerDo { + return b.withDO(b.DO.Join(table, on...)) +} + +func (b builderRunnerDo) LeftJoin(table schema.Tabler, on ...field.Expr) *builderRunnerDo { + return b.withDO(b.DO.LeftJoin(table, on...)) +} + +func (b builderRunnerDo) RightJoin(table schema.Tabler, on ...field.Expr) *builderRunnerDo { + return b.withDO(b.DO.RightJoin(table, on...)) +} + +func (b builderRunnerDo) Group(cols ...field.Expr) *builderRunnerDo { + return b.withDO(b.DO.Group(cols...)) +} + +func (b builderRunnerDo) Having(conds ...gen.Condition) *builderRunnerDo { + return b.withDO(b.DO.Having(conds...)) +} + +func (b builderRunnerDo) Limit(limit int) *builderRunnerDo { + return b.withDO(b.DO.Limit(limit)) +} + +func (b builderRunnerDo) Offset(offset int) *builderRunnerDo { + return b.withDO(b.DO.Offset(offset)) +} + +func (b builderRunnerDo) Scopes(funcs ...func(gen.Dao) gen.Dao) *builderRunnerDo { + return b.withDO(b.DO.Scopes(funcs...)) +} + +func (b builderRunnerDo) Unscoped() *builderRunnerDo { + return b.withDO(b.DO.Unscoped()) +} + +func (b builderRunnerDo) Create(values ...*models.BuilderRunner) error { + if len(values) == 0 { + return nil + } + return b.DO.Create(values) +} + +func (b builderRunnerDo) CreateInBatches(values []*models.BuilderRunner, batchSize int) error { + return b.DO.CreateInBatches(values, batchSize) +} + +// Save : !!! 
underlying implementation is different with GORM +// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values) +func (b builderRunnerDo) Save(values ...*models.BuilderRunner) error { + if len(values) == 0 { + return nil + } + return b.DO.Save(values) +} + +func (b builderRunnerDo) First() (*models.BuilderRunner, error) { + if result, err := b.DO.First(); err != nil { + return nil, err + } else { + return result.(*models.BuilderRunner), nil + } +} + +func (b builderRunnerDo) Take() (*models.BuilderRunner, error) { + if result, err := b.DO.Take(); err != nil { + return nil, err + } else { + return result.(*models.BuilderRunner), nil + } +} + +func (b builderRunnerDo) Last() (*models.BuilderRunner, error) { + if result, err := b.DO.Last(); err != nil { + return nil, err + } else { + return result.(*models.BuilderRunner), nil + } +} + +func (b builderRunnerDo) Find() ([]*models.BuilderRunner, error) { + result, err := b.DO.Find() + return result.([]*models.BuilderRunner), err +} + +func (b builderRunnerDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.BuilderRunner, err error) { + buf := make([]*models.BuilderRunner, 0, batchSize) + err = b.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error { + defer func() { results = append(results, buf...) }() + return fc(tx, batch) + }) + return results, err +} + +func (b builderRunnerDo) FindInBatches(result *[]*models.BuilderRunner, batchSize int, fc func(tx gen.Dao, batch int) error) error { + return b.DO.FindInBatches(result, batchSize, fc) +} + +func (b builderRunnerDo) Attrs(attrs ...field.AssignExpr) *builderRunnerDo { + return b.withDO(b.DO.Attrs(attrs...)) +} + +func (b builderRunnerDo) Assign(attrs ...field.AssignExpr) *builderRunnerDo { + return b.withDO(b.DO.Assign(attrs...)) +} + +func (b builderRunnerDo) Joins(fields ...field.RelationField) *builderRunnerDo { + for _, _f := range fields { + b = *b.withDO(b.DO.Joins(_f)) + } + return &b +} + +func (b builderRunnerDo) Preload(fields ...field.RelationField) *builderRunnerDo { + for _, _f := range fields { + b = *b.withDO(b.DO.Preload(_f)) + } + return &b +} + +func (b builderRunnerDo) FirstOrInit() (*models.BuilderRunner, error) { + if result, err := b.DO.FirstOrInit(); err != nil { + return nil, err + } else { + return result.(*models.BuilderRunner), nil + } +} + +func (b builderRunnerDo) FirstOrCreate() (*models.BuilderRunner, error) { + if result, err := b.DO.FirstOrCreate(); err != nil { + return nil, err + } else { + return result.(*models.BuilderRunner), nil + } +} + +func (b builderRunnerDo) FindByPage(offset int, limit int) (result []*models.BuilderRunner, count int64, err error) { + result, err = b.Offset(offset).Limit(limit).Find() + if err != nil { + return + } + + if size := len(result); 0 < limit && 0 < size && size < limit { + count = int64(size + offset) + return + } + + count, err = b.Offset(-1).Limit(-1).Count() + return +} + +func (b builderRunnerDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) { + count, err = b.Count() + if err != nil { + return + } + + err = b.Offset(offset).Limit(limit).Scan(result) + return +} + +func (b builderRunnerDo) Scan(result interface{}) (err error) { + return b.DO.Scan(result) +} + +func (b builderRunnerDo) Delete(models ...*models.BuilderRunner) (result gen.ResultInfo, err error) { + return b.DO.Delete(models) +} + +func (b *builderRunnerDo) withDO(do gen.Dao) *builderRunnerDo { + b.DO = *do.(*gen.DO) + 
return b +} diff --git a/pkg/dal/query/builders.gen.go b/pkg/dal/query/builders.gen.go index 86bdf73f..93c913cb 100644 --- a/pkg/dal/query/builders.gen.go +++ b/pkg/dal/query/builders.gen.go @@ -31,6 +31,31 @@ func newBuilder(db *gorm.DB, opts ...gen.DOOption) builder { _builder.UpdatedAt = field.NewTime(tableName, "updated_at") _builder.DeletedAt = field.NewUint(tableName, "deleted_at") _builder.ID = field.NewInt64(tableName, "id") + _builder.RepositoryID = field.NewInt64(tableName, "repository_id") + _builder.Active = field.NewBool(tableName, "active") + _builder.ScmCredentialType = field.NewField(tableName, "scm_credential_type") + _builder.ScmToken = field.NewString(tableName, "scm_token") + _builder.ScmSshKey = field.NewString(tableName, "scm_ssh_key") + _builder.ScmUsername = field.NewString(tableName, "scm_username") + _builder.ScmPassword = field.NewString(tableName, "scm_password") + _builder.ScmRepository = field.NewString(tableName, "scm_repository") + _builder.ScmBranch = field.NewString(tableName, "scm_branch") + _builder.ScmDepth = field.NewInt(tableName, "scm_depth") + _builder.ScmSubmodule = field.NewBool(tableName, "scm_submodule") + _builder.BuildkitInsecureRegistries = field.NewString(tableName, "buildkit_insecure_registries") + _builder.BuildkitContext = field.NewString(tableName, "buildkit_context") + _builder.BuildkitDockerfile = field.NewString(tableName, "buildkit_dockerfile") + _builder.BuildkitPlatforms = field.NewString(tableName, "buildkit_platforms") + _builder.Repository = builderBelongsToRepository{ + db: db.Session(&gorm.Session{}), + + RelationField: field.NewRelation("Repository", "models.Repository"), + Namespace: struct { + field.RelationField + }{ + RelationField: field.NewRelation("Repository.Namespace", "models.Namespace"), + }, + } _builder.fillFieldMap() @@ -40,11 +65,27 @@ func newBuilder(db *gorm.DB, opts ...gen.DOOption) builder { type builder struct { builderDo builderDo - ALL field.Asterisk - CreatedAt field.Time - UpdatedAt field.Time - DeletedAt field.Uint - ID field.Int64 + ALL field.Asterisk + CreatedAt field.Time + UpdatedAt field.Time + DeletedAt field.Uint + ID field.Int64 + RepositoryID field.Int64 + Active field.Bool + ScmCredentialType field.Field + ScmToken field.String + ScmSshKey field.String + ScmUsername field.String + ScmPassword field.String + ScmRepository field.String + ScmBranch field.String + ScmDepth field.Int + ScmSubmodule field.Bool + BuildkitInsecureRegistries field.String + BuildkitContext field.String + BuildkitDockerfile field.String + BuildkitPlatforms field.String + Repository builderBelongsToRepository fieldMap map[string]field.Expr } @@ -65,6 +106,21 @@ func (b *builder) updateTableName(table string) *builder { b.UpdatedAt = field.NewTime(table, "updated_at") b.DeletedAt = field.NewUint(table, "deleted_at") b.ID = field.NewInt64(table, "id") + b.RepositoryID = field.NewInt64(table, "repository_id") + b.Active = field.NewBool(table, "active") + b.ScmCredentialType = field.NewField(table, "scm_credential_type") + b.ScmToken = field.NewString(table, "scm_token") + b.ScmSshKey = field.NewString(table, "scm_ssh_key") + b.ScmUsername = field.NewString(table, "scm_username") + b.ScmPassword = field.NewString(table, "scm_password") + b.ScmRepository = field.NewString(table, "scm_repository") + b.ScmBranch = field.NewString(table, "scm_branch") + b.ScmDepth = field.NewInt(table, "scm_depth") + b.ScmSubmodule = field.NewBool(table, "scm_submodule") + b.BuildkitInsecureRegistries = field.NewString(table, 
"buildkit_insecure_registries") + b.BuildkitContext = field.NewString(table, "buildkit_context") + b.BuildkitDockerfile = field.NewString(table, "buildkit_dockerfile") + b.BuildkitPlatforms = field.NewString(table, "buildkit_platforms") b.fillFieldMap() @@ -89,11 +145,27 @@ func (b *builder) GetFieldByName(fieldName string) (field.OrderExpr, bool) { } func (b *builder) fillFieldMap() { - b.fieldMap = make(map[string]field.Expr, 4) + b.fieldMap = make(map[string]field.Expr, 20) b.fieldMap["created_at"] = b.CreatedAt b.fieldMap["updated_at"] = b.UpdatedAt b.fieldMap["deleted_at"] = b.DeletedAt b.fieldMap["id"] = b.ID + b.fieldMap["repository_id"] = b.RepositoryID + b.fieldMap["active"] = b.Active + b.fieldMap["scm_credential_type"] = b.ScmCredentialType + b.fieldMap["scm_token"] = b.ScmToken + b.fieldMap["scm_ssh_key"] = b.ScmSshKey + b.fieldMap["scm_username"] = b.ScmUsername + b.fieldMap["scm_password"] = b.ScmPassword + b.fieldMap["scm_repository"] = b.ScmRepository + b.fieldMap["scm_branch"] = b.ScmBranch + b.fieldMap["scm_depth"] = b.ScmDepth + b.fieldMap["scm_submodule"] = b.ScmSubmodule + b.fieldMap["buildkit_insecure_registries"] = b.BuildkitInsecureRegistries + b.fieldMap["buildkit_context"] = b.BuildkitContext + b.fieldMap["buildkit_dockerfile"] = b.BuildkitDockerfile + b.fieldMap["buildkit_platforms"] = b.BuildkitPlatforms + } func (b builder) clone(db *gorm.DB) builder { @@ -106,6 +178,81 @@ func (b builder) replaceDB(db *gorm.DB) builder { return b } +type builderBelongsToRepository struct { + db *gorm.DB + + field.RelationField + + Namespace struct { + field.RelationField + } +} + +func (a builderBelongsToRepository) Where(conds ...field.Expr) *builderBelongsToRepository { + if len(conds) == 0 { + return &a + } + + exprs := make([]clause.Expression, 0, len(conds)) + for _, cond := range conds { + exprs = append(exprs, cond.BeCond().(clause.Expression)) + } + a.db = a.db.Clauses(clause.Where{Exprs: exprs}) + return &a +} + +func (a builderBelongsToRepository) WithContext(ctx context.Context) *builderBelongsToRepository { + a.db = a.db.WithContext(ctx) + return &a +} + +func (a builderBelongsToRepository) Session(session *gorm.Session) *builderBelongsToRepository { + a.db = a.db.Session(session) + return &a +} + +func (a builderBelongsToRepository) Model(m *models.Builder) *builderBelongsToRepositoryTx { + return &builderBelongsToRepositoryTx{a.db.Model(m).Association(a.Name())} +} + +type builderBelongsToRepositoryTx struct{ tx *gorm.Association } + +func (a builderBelongsToRepositoryTx) Find() (result *models.Repository, err error) { + return result, a.tx.Find(&result) +} + +func (a builderBelongsToRepositoryTx) Append(values ...*models.Repository) (err error) { + targetValues := make([]interface{}, len(values)) + for i, v := range values { + targetValues[i] = v + } + return a.tx.Append(targetValues...) +} + +func (a builderBelongsToRepositoryTx) Replace(values ...*models.Repository) (err error) { + targetValues := make([]interface{}, len(values)) + for i, v := range values { + targetValues[i] = v + } + return a.tx.Replace(targetValues...) +} + +func (a builderBelongsToRepositoryTx) Delete(values ...*models.Repository) (err error) { + targetValues := make([]interface{}, len(values)) + for i, v := range values { + targetValues[i] = v + } + return a.tx.Delete(targetValues...) 
+} + +func (a builderBelongsToRepositoryTx) Clear() error { + return a.tx.Clear() +} + +func (a builderBelongsToRepositoryTx) Count() int64 { + return a.tx.Count() +} + type builderDo struct{ gen.DO } func (b builderDo) Debug() *builderDo { diff --git a/pkg/dal/query/gen.go b/pkg/dal/query/gen.go index a7c035fc..f2b73617 100644 --- a/pkg/dal/query/gen.go +++ b/pkg/dal/query/gen.go @@ -24,7 +24,7 @@ var ( Blob *blob BlobUpload *blobUpload Builder *builder - BuilderLog *builderLog + BuilderRunner *builderRunner CasbinRule *casbinRule DaemonLog *daemonLog Namespace *namespace @@ -45,7 +45,7 @@ func SetDefault(db *gorm.DB, opts ...gen.DOOption) { Blob = &Q.Blob BlobUpload = &Q.BlobUpload Builder = &Q.Builder - BuilderLog = &Q.BuilderLog + BuilderRunner = &Q.BuilderRunner CasbinRule = &Q.CasbinRule DaemonLog = &Q.DaemonLog Namespace = &Q.Namespace @@ -67,7 +67,7 @@ func Use(db *gorm.DB, opts ...gen.DOOption) *Query { Blob: newBlob(db, opts...), BlobUpload: newBlobUpload(db, opts...), Builder: newBuilder(db, opts...), - BuilderLog: newBuilderLog(db, opts...), + BuilderRunner: newBuilderRunner(db, opts...), CasbinRule: newCasbinRule(db, opts...), DaemonLog: newDaemonLog(db, opts...), Namespace: newNamespace(db, opts...), @@ -90,7 +90,7 @@ type Query struct { Blob blob BlobUpload blobUpload Builder builder - BuilderLog builderLog + BuilderRunner builderRunner CasbinRule casbinRule DaemonLog daemonLog Namespace namespace @@ -114,7 +114,7 @@ func (q *Query) clone(db *gorm.DB) *Query { Blob: q.Blob.clone(db), BlobUpload: q.BlobUpload.clone(db), Builder: q.Builder.clone(db), - BuilderLog: q.BuilderLog.clone(db), + BuilderRunner: q.BuilderRunner.clone(db), CasbinRule: q.CasbinRule.clone(db), DaemonLog: q.DaemonLog.clone(db), Namespace: q.Namespace.clone(db), @@ -145,7 +145,7 @@ func (q *Query) ReplaceDB(db *gorm.DB) *Query { Blob: q.Blob.replaceDB(db), BlobUpload: q.BlobUpload.replaceDB(db), Builder: q.Builder.replaceDB(db), - BuilderLog: q.BuilderLog.replaceDB(db), + BuilderRunner: q.BuilderRunner.replaceDB(db), CasbinRule: q.CasbinRule.replaceDB(db), DaemonLog: q.DaemonLog.replaceDB(db), Namespace: q.Namespace.replaceDB(db), @@ -166,7 +166,7 @@ type queryCtx struct { Blob *blobDo BlobUpload *blobUploadDo Builder *builderDo - BuilderLog *builderLogDo + BuilderRunner *builderRunnerDo CasbinRule *casbinRuleDo DaemonLog *daemonLogDo Namespace *namespaceDo @@ -187,7 +187,7 @@ func (q *Query) WithContext(ctx context.Context) *queryCtx { Blob: q.Blob.WithContext(ctx), BlobUpload: q.BlobUpload.WithContext(ctx), Builder: q.Builder.WithContext(ctx), - BuilderLog: q.BuilderLog.WithContext(ctx), + BuilderRunner: q.BuilderRunner.WithContext(ctx), CasbinRule: q.CasbinRule.WithContext(ctx), DaemonLog: q.DaemonLog.WithContext(ctx), Namespace: q.Namespace.WithContext(ctx), diff --git a/pkg/handlers/builders/builders_post.go b/pkg/handlers/builders/builders_post.go new file mode 100644 index 00000000..324af430 --- /dev/null +++ b/pkg/handlers/builders/builders_post.go @@ -0,0 +1,130 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package builders + +import ( + "errors" + "fmt" + "net/http" + "strconv" + + "github.com/labstack/echo/v4" + "github.com/rs/zerolog/log" + "gorm.io/gorm" + + "github.com/go-sigma/sigma/pkg/consts" + "github.com/go-sigma/sigma/pkg/dal/models" + "github.com/go-sigma/sigma/pkg/dal/query" + "github.com/go-sigma/sigma/pkg/types" + "github.com/go-sigma/sigma/pkg/types/enums" + "github.com/go-sigma/sigma/pkg/utils" + "github.com/go-sigma/sigma/pkg/utils/ptr" + "github.com/go-sigma/sigma/pkg/xerrors" +) + +// PostBuilder handles the post builder request +// @Summary Create a builder for repository +// @Tags Builder +// @security BasicAuth +// @Accept json +// @Produce json +// @Router /builders [post] +// @Param repository_id query int64 true "create builder for repository" +// @Param message body types.PostBuilderRequestSwagger true "Builder object" +// @Success 201 +// @Failure 400 {object} xerrors.ErrCode +// @Failure 404 {object} xerrors.ErrCode +// @Failure 500 {object} xerrors.ErrCode +func (h *handlers) PostBuilder(c echo.Context) error { + ctx := log.Logger.WithContext(c.Request().Context()) + + iuser := c.Get(consts.ContextUser) + if iuser == nil { + log.Error().Msg("Get user from header failed") + return xerrors.NewHTTPError(c, xerrors.HTTPErrCodeUnauthorized) + } + user, ok := iuser.(*models.User) + if !ok { + log.Error().Msg("Convert user from header failed") + return xerrors.NewHTTPError(c, xerrors.HTTPErrCodeUnauthorized) + } + + var req types.PostBuilderRequest + err := utils.BindValidate(c, &req) + if err != nil { + log.Error().Err(err).Msg("Bind and validate request body failed") + return xerrors.NewHTTPError(c, xerrors.HTTPErrCodeBadRequest, err.Error()) + } + + repositoryService := h.repositoryServiceFactory.New() + repositoryObj, err := repositoryService.Get(ctx, req.RepositoryID) + if err != nil { + if errors.Is(err, gorm.ErrRecordNotFound) { + log.Error().Err(err).Int64("id", req.RepositoryID).Msg("Repository not found") + return xerrors.NewHTTPError(c, xerrors.HTTPErrCodeNotFound, "Repository not found") + } + log.Error().Err(err).Int64("id", req.RepositoryID).Msg("Repository find failed") + return xerrors.NewHTTPError(c, xerrors.HTTPErrCodeInternalError, "Repository find failed") + } + + builderService := h.builderServiceFactory.New() + _, err = builderService.GetByRepositoryID(ctx, req.RepositoryID) + if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { + log.Error().Err(err).Int64("id", req.RepositoryID).Msg("Get builder by repository id failed") + return xerrors.NewHTTPError(c, xerrors.HTTPErrCodeInternalError, err.Error()) + } + if err == nil { + log.Error().Err(err).Int64("id", req.RepositoryID).Msg("Repository has been already create builder") + return xerrors.NewHTTPError(c, xerrors.HTTPErrCodeConflict, "Repository has been already create builder") + } + err = query.Q.Transaction(func(tx *query.Query) error { + builderService := h.builderServiceFactory.New(tx) + builderObj := &models.Builder{ + RepositoryID: req.RepositoryID, + ScmCredentialType: req.ScmCredentialType, + ScmToken: req.ScmToken, + ScmSshKey: req.ScmSshKey, + ScmUsername: req.ScmUsername, + ScmPassword: req.ScmPassword, // should encrypt the password + ScmRepository: req.ScmRepository, + ScmBranch: req.ScmBranch, + ScmDepth: req.ScmDepth, + ScmSubmodule: req.ScmSubmodule, + } + err = builderService.Create(ctx, builderObj) + if err != nil { + log.Error().Err(err).Int64("id", 
req.RepositoryID).Msg("Create builder for repository failed") + return xerrors.HTTPErrCodeInternalError.Detail("Create builder for repository failed") + } + auditService := h.auditServiceFactory.New(tx) + err = auditService.Create(ctx, &models.Audit{ + UserID: user.ID, + NamespaceID: ptr.Of(repositoryObj.NamespaceID), + Action: enums.AuditActionCreate, + ResourceType: enums.AuditResourceTypeBuilder, + Resource: strconv.FormatInt(builderObj.ID, 10), + ReqRaw: utils.MustMarshal(builderObj), + }) + if err != nil { + log.Error().Err(err).Msg("Create audit failed") + return xerrors.HTTPErrCodeInternalError.Detail(fmt.Sprintf("Create audit failed: %v", err)) + } + return nil + }) + if err != nil { + return xerrors.NewHTTPError(c, err.(xerrors.ErrCode)) + } + return c.NoContent(http.StatusCreated) +} diff --git a/pkg/handlers/builders/handler.go b/pkg/handlers/builders/handler.go new file mode 100644 index 00000000..10dd2976 --- /dev/null +++ b/pkg/handlers/builders/handler.go @@ -0,0 +1,101 @@ +// Copyright 2023 sigma +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package builders + +import ( + "path" + "reflect" + + "github.com/labstack/echo/v4" + + "github.com/go-sigma/sigma/pkg/consts" + "github.com/go-sigma/sigma/pkg/dal/dao" + rhandlers "github.com/go-sigma/sigma/pkg/handlers" + "github.com/go-sigma/sigma/pkg/middlewares" + "github.com/go-sigma/sigma/pkg/utils" +) + +// Handler is the interface for the builder handlers +type Handlers interface { + // PostBuilder handles the post builder request + PostBuilder(c echo.Context) error +} + +var _ Handlers = &handlers{} + +type handlers struct { + namespaceServiceFactory dao.NamespaceServiceFactory + repositoryServiceFactory dao.RepositoryServiceFactory + webhookServiceFactory dao.WebhookServiceFactory + auditServiceFactory dao.AuditServiceFactory + builderServiceFactory dao.BuilderServiceFactory +} + +type inject struct { + namespaceServiceFactory dao.NamespaceServiceFactory + repositoryServiceFactory dao.RepositoryServiceFactory + webhookServiceFactory dao.WebhookServiceFactory + auditServiceFactory dao.AuditServiceFactory + builderServiceFactory dao.BuilderServiceFactory +} + +// handlerNew creates a new instance of the builder handlers +func handlerNew(injects ...inject) Handlers { + namespaceServiceFactory := dao.NewNamespaceServiceFactory() + repositoryServiceFactory := dao.NewRepositoryServiceFactory() + webhookServiceFactory := dao.NewWebhookServiceFactory() + auditServiceFactory := dao.NewAuditServiceFactory() + builderServiceFactory := dao.NewBuilderServiceFactory() + if len(injects) > 0 { + ij := injects[0] + if ij.namespaceServiceFactory != nil { + namespaceServiceFactory = ij.namespaceServiceFactory + } + if ij.webhookServiceFactory != nil { + webhookServiceFactory = ij.webhookServiceFactory + } + if ij.auditServiceFactory != nil { + auditServiceFactory = ij.auditServiceFactory + } + if ij.builderServiceFactory != nil { + builderServiceFactory = ij.builderServiceFactory + } + if ij.repositoryServiceFactory != nil { + 
repositoryServiceFactory = ij.repositoryServiceFactory
+		}
+	}
+	return &handlers{
+		namespaceServiceFactory:  namespaceServiceFactory,
+		repositoryServiceFactory: repositoryServiceFactory,
+		webhookServiceFactory:    webhookServiceFactory,
+		auditServiceFactory:      auditServiceFactory,
+		builderServiceFactory:    builderServiceFactory,
+	}
+}
+
+type factory struct{}
+
+// Initialize initializes the builder handlers
+func (f factory) Initialize(e *echo.Echo) error {
+	builderGroup := e.Group(consts.APIV1+"/repositories/:repository_id/builders", middlewares.AuthWithConfig(middlewares.AuthConfig{}))
+
+	builderHandler := handlerNew()
+	builderGroup.POST("/", builderHandler.PostBuilder)
+	return nil
+}
+
+func init() {
+	utils.PanicIf(rhandlers.RegisterRouterFactory(path.Base(reflect.TypeOf(factory{}).PkgPath()), &factory{}))
+}
diff --git a/pkg/handlers/webhooks/handler.go b/pkg/handlers/webhooks/handler.go
index 05a747ee..ed136f86 100644
--- a/pkg/handlers/webhooks/handler.go
+++ b/pkg/handlers/webhooks/handler.go
@@ -27,7 +27,7 @@ import (
 	"github.com/go-sigma/sigma/pkg/utils"
 )
 
-// Handler is the interface for the system handlers
+// Handler is the interface for the webhook handlers
 type Handlers interface {
 	// PostWebhook handles the post webhook request
 	PostWebhook(c echo.Context) error
@@ -63,7 +63,7 @@ type inject struct {
 	auditServiceFactory dao.AuditServiceFactory
 }
 
-// handlerNew creates a new instance of the distribution handlers
+// handlerNew creates a new instance of the webhook handlers
 func handlerNew(injects ...inject) Handlers {
 	namespaceServiceFactory := dao.NewNamespaceServiceFactory()
 	webhookServiceFactory := dao.NewWebhookServiceFactory()
diff --git a/pkg/types/builder.go b/pkg/types/builder.go
index c5d6691b..3c899b08 100644
--- a/pkg/types/builder.go
+++ b/pkg/types/builder.go
@@ -18,7 +18,8 @@ import "github.com/go-sigma/sigma/pkg/types/enums"
 
 // Builder config for builder
 type Builder struct {
-	ID                string                  `env:"ID,notEmpty"`
+	ID                int64                   `env:"ID,notEmpty"`
+	RunnerID          int64                   `env:"RUNNER_ID,notEmpty"`
 
 	ScmCredentialType enums.ScmCredentialType `env:"SCM_CREDENTIAL_TYPE,notEmpty"`
 	ScmSshKey         string                  `env:"SCM_SSH_KEY"`
@@ -29,12 +30,12 @@ type Builder struct {
 	ScmRepository     string                  `env:"SCM_REPOSITORY,notEmpty"`
 	ScmBranch         string                  `env:"SCM_BRANCH" envDefault:"main"`
 	ScmDepth          int                     `env:"SCM_DEPTH" envDefault:"0"`
-	ScmSubModule      bool                    `env:"SCM_SUBMODULE" envDefault:"false"`
+	ScmSubmodule      bool                    `env:"SCM_SUBMODULE" envDefault:"false"`
 
-	OciRegistryDomain   string `env:"OCI_REGISTRY_DOMAIN,notEmpty"`
-	OciRegistryUsername string `env:"OCI_REGISTRY_USERNAME"`
-	OciRegistryPassword string `env:"OCI_REGISTRY_PASSWORD"`
-	OciName             string `env:"OCI_NAME,notEmpty"`
+	OciRegistryDomain   []string `env:"OCI_REGISTRY_DOMAIN" envSeparator:","`
+	OciRegistryUsername []string `env:"OCI_REGISTRY_USERNAME" envSeparator:","`
+	OciRegistryPassword []string `env:"OCI_REGISTRY_PASSWORD" envSeparator:","`
+	OciName             string   `env:"OCI_NAME,notEmpty"`
 
 	BuildkitInsecureRegistries []string `env:"BUILDKIT_INSECURE_REGISTRIES" envSeparator:","`
 	BuildkitCacheDir           string   `env:"BUILDKIT_CACHE_DIR" envDefault:"/tmp/buildkit"`
@@ -42,3 +43,45 @@ type Builder struct {
 	BuildkitDockerfile string              `env:"BUILDKIT_DOCKERFILE" envDefault:"Dockerfile"`
 	BuildkitPlatforms  []enums.OciPlatform `env:"BUILDKIT_PLATFORMS" envSeparator:","`
 }
+
+// PostBuilderRequest ...
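+// An illustrative request, with values taken from the example tags below and
+// assuming consts.APIV1 resolves to /api/v1 (a sketch, not the authoritative API contract):
+//
+//	POST /api/v1/repositories/10/builders/
+//	{
+//	  "scm_credential_type": "ssh",
+//	  "scm_ssh_key": "xxxx",
+//	  "scm_repository": "https://github.com/go-sigma/sigma.git",
+//	  "scm_branch": "main",
+//	  "scm_depth": 0,
+//	  "scm_submodule": false
+//	}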
+type PostBuilderRequest struct { + RepositoryID int64 `json:"repository_id" param:"repository_id" example:"10"` + + ScmCredentialType enums.ScmCredentialType `json:"scm_credential_type" example:"ssh"` + ScmSshKey string `json:"scm_ssh_key" example:"xxxx"` + ScmToken string `json:"scm_token" example:"xxxx"` + ScmUsername string `json:"scm_username" example:"sigma"` + ScmPassword string `json:"scm_password" example:"sigma"` + // ScmProvider enums.ScmProvider `json:"scm_provider"` + ScmRepository string `json:"scm_repository" example:"https://github.com/go-sigma/sigma.git"` + ScmBranch string `json:"scm_branch" example:"main"` + ScmDepth int `json:"scm_depth" example:"0"` + ScmSubmodule bool `json:"scm_submodule" example:"false"` +} + +// PostBuilderRequestSwagger ... +type PostBuilderRequestSwagger struct { + ScmCredentialType enums.ScmCredentialType `json:"scm_credential_type" example:"ssh"` + ScmSshKey string `json:"scm_ssh_key" example:"xxxx"` + ScmToken string `json:"scm_token" example:"xxxx"` + ScmUsername string `json:"scm_username" example:"sigma"` + ScmPassword string `json:"scm_password" example:"sigma"` + // ScmProvider enums.ScmProvider `json:"scm_provider"` + ScmRepository string `json:"scm_repository" example:"https://github.com/go-sigma/sigma.git"` + ScmBranch string `json:"scm_branch" example:"main"` + ScmDepth int `json:"scm_depth" example:"0"` + ScmSubmodule bool `json:"scm_submodule" example:"false"` +} diff --git a/pkg/types/daemon.go b/pkg/types/daemon.go index 2c2d605e..e50bdd49 100644 --- a/pkg/types/daemon.go +++ b/pkg/types/daemon.go @@ -54,6 +54,14 @@ type DaemonWebhookPayload struct { Payload []byte `json:"payload"` } +// DaemonBuilderPayload ... +type DaemonBuilderPayload struct { + Action enums.DaemonBuilderAction + ID int64 + RunnerID int64 + RepositoryID int64 +} + // PostDaemonRunRequest ... type PostDaemonRunRequest struct { NamespaceID int64 `json:"namespace_id,omitempty" query:"namespace_id" validate:"omitempty,number" example:"123"` diff --git a/pkg/types/enums/enums.go b/pkg/types/enums/enums.go index c159540d..153e0a9e 100644 --- a/pkg/types/enums/enums.go +++ b/pkg/types/enums/enums.go @@ -41,6 +41,15 @@ type Deploy string // ) type TaskCommonStatus string +// BuildStatus x ENUM( +// Success, +// Failed, +// Pending, +// Scheduling, +// Building, +// ) +type BuildStatus string + // Database x ENUM( // postgresql, // mysql, @@ -71,6 +80,7 @@ type WorkQueueType string // Gc, // GcRepository, // Webhook, +// Builder, // ) type Daemon string @@ -123,6 +133,7 @@ type AuditAction string // repository, // tag, // webhook, +// builder, // ) type AuditResourceType string @@ -178,3 +189,10 @@ type ScmProvider string // linux/arm/v6, // ) type OciPlatform string + +// DaemonBuilderAction x ENUM( +// Start, +// Restart, +// Stop, +// ) +type DaemonBuilderAction string diff --git a/pkg/types/enums/enums_enum.go b/pkg/types/enums/enums_enum.go index 14047a9d..216d4843 100644 --- a/pkg/types/enums/enums_enum.go +++ b/pkg/types/enums/enums_enum.go @@ -211,6 +211,8 @@ const ( AuditResourceTypeTag AuditResourceType = "tag" // AuditResourceTypeWebhook is a AuditResourceType of type webhook. AuditResourceTypeWebhook AuditResourceType = "webhook" + // AuditResourceTypeBuilder is a AuditResourceType of type builder. 
+ AuditResourceTypeBuilder AuditResourceType = "builder" ) var ErrInvalidAuditResourceType = errors.New("not a valid AuditResourceType") @@ -232,6 +234,7 @@ var _AuditResourceTypeValue = map[string]AuditResourceType{ "repository": AuditResourceTypeRepository, "tag": AuditResourceTypeTag, "webhook": AuditResourceTypeWebhook, + "builder": AuditResourceTypeBuilder, } // ParseAuditResourceType attempts to convert a string to a AuditResourceType. @@ -291,6 +294,98 @@ func (x AuditResourceType) Value() (driver.Value, error) { return x.String(), nil } +const ( + // BuildStatusSuccess is a BuildStatus of type Success. + BuildStatusSuccess BuildStatus = "Success" + // BuildStatusFailed is a BuildStatus of type Failed. + BuildStatusFailed BuildStatus = "Failed" + // BuildStatusPending is a BuildStatus of type Pending. + BuildStatusPending BuildStatus = "Pending" + // BuildStatusScheduling is a BuildStatus of type Scheduling. + BuildStatusScheduling BuildStatus = "Scheduling" + // BuildStatusBuilding is a BuildStatus of type Building. + BuildStatusBuilding BuildStatus = "Building" +) + +var ErrInvalidBuildStatus = errors.New("not a valid BuildStatus") + +// String implements the Stringer interface. +func (x BuildStatus) String() string { + return string(x) +} + +// IsValid provides a quick way to determine if the typed value is +// part of the allowed enumerated values +func (x BuildStatus) IsValid() bool { + _, err := ParseBuildStatus(string(x)) + return err == nil +} + +var _BuildStatusValue = map[string]BuildStatus{ + "Success": BuildStatusSuccess, + "Failed": BuildStatusFailed, + "Pending": BuildStatusPending, + "Scheduling": BuildStatusScheduling, + "Building": BuildStatusBuilding, +} + +// ParseBuildStatus attempts to convert a string to a BuildStatus. +func ParseBuildStatus(name string) (BuildStatus, error) { + if x, ok := _BuildStatusValue[name]; ok { + return x, nil + } + return BuildStatus(""), fmt.Errorf("%s is %w", name, ErrInvalidBuildStatus) +} + +// MustParseBuildStatus converts a string to a BuildStatus, and panics if is not valid. +func MustParseBuildStatus(name string) BuildStatus { + val, err := ParseBuildStatus(name) + if err != nil { + panic(err) + } + return val +} + +var errBuildStatusNilPtr = errors.New("value pointer is nil") // one per type for package clashes + +// Scan implements the Scanner interface. +func (x *BuildStatus) Scan(value interface{}) (err error) { + if value == nil { + *x = BuildStatus("") + return + } + + // A wider range of scannable types. + // driver.Value values at the top of the list for expediency + switch v := value.(type) { + case string: + *x, err = ParseBuildStatus(v) + case []byte: + *x, err = ParseBuildStatus(string(v)) + case BuildStatus: + *x = v + case *BuildStatus: + if v == nil { + return errBuildStatusNilPtr + } + *x = *v + case *string: + if v == nil { + return errBuildStatusNilPtr + } + *x, err = ParseBuildStatus(*v) + default: + return errors.New("invalid type for BuildStatus") + } + + return +} + +// Value implements the driver Valuer interface. +func (x BuildStatus) Value() (driver.Value, error) { + return x.String(), nil +} + const ( // CacheTypeMemory is a CacheType of type memory. CacheTypeMemory CacheType = "memory" @@ -385,6 +480,8 @@ const ( DaemonGcRepository Daemon = "GcRepository" // DaemonWebhook is a Daemon of type Webhook. DaemonWebhook Daemon = "Webhook" + // DaemonBuilder is a Daemon of type Builder. 
+ DaemonBuilder Daemon = "Builder" ) var ErrInvalidDaemon = errors.New("not a valid Daemon") @@ -407,6 +504,7 @@ var _DaemonValue = map[string]Daemon{ "Gc": DaemonGc, "GcRepository": DaemonGcRepository, "Webhook": DaemonWebhook, + "Builder": DaemonBuilder, } // ParseDaemon attempts to convert a string to a Daemon. @@ -466,6 +564,92 @@ func (x Daemon) Value() (driver.Value, error) { return x.String(), nil } +const ( + // DaemonBuilderActionStart is a DaemonBuilderAction of type Start. + DaemonBuilderActionStart DaemonBuilderAction = "Start" + // DaemonBuilderActionRestart is a DaemonBuilderAction of type Restart. + DaemonBuilderActionRestart DaemonBuilderAction = "Restart" + // DaemonBuilderActionStop is a DaemonBuilderAction of type Stop. + DaemonBuilderActionStop DaemonBuilderAction = "Stop" +) + +var ErrInvalidDaemonBuilderAction = errors.New("not a valid DaemonBuilderAction") + +// String implements the Stringer interface. +func (x DaemonBuilderAction) String() string { + return string(x) +} + +// IsValid provides a quick way to determine if the typed value is +// part of the allowed enumerated values +func (x DaemonBuilderAction) IsValid() bool { + _, err := ParseDaemonBuilderAction(string(x)) + return err == nil +} + +var _DaemonBuilderActionValue = map[string]DaemonBuilderAction{ + "Start": DaemonBuilderActionStart, + "Restart": DaemonBuilderActionRestart, + "Stop": DaemonBuilderActionStop, +} + +// ParseDaemonBuilderAction attempts to convert a string to a DaemonBuilderAction. +func ParseDaemonBuilderAction(name string) (DaemonBuilderAction, error) { + if x, ok := _DaemonBuilderActionValue[name]; ok { + return x, nil + } + return DaemonBuilderAction(""), fmt.Errorf("%s is %w", name, ErrInvalidDaemonBuilderAction) +} + +// MustParseDaemonBuilderAction converts a string to a DaemonBuilderAction, and panics if is not valid. +func MustParseDaemonBuilderAction(name string) DaemonBuilderAction { + val, err := ParseDaemonBuilderAction(name) + if err != nil { + panic(err) + } + return val +} + +var errDaemonBuilderActionNilPtr = errors.New("value pointer is nil") // one per type for package clashes + +// Scan implements the Scanner interface. +func (x *DaemonBuilderAction) Scan(value interface{}) (err error) { + if value == nil { + *x = DaemonBuilderAction("") + return + } + + // A wider range of scannable types. + // driver.Value values at the top of the list for expediency + switch v := value.(type) { + case string: + *x, err = ParseDaemonBuilderAction(v) + case []byte: + *x, err = ParseDaemonBuilderAction(string(v)) + case DaemonBuilderAction: + *x = v + case *DaemonBuilderAction: + if v == nil { + return errDaemonBuilderActionNilPtr + } + *x = *v + case *string: + if v == nil { + return errDaemonBuilderActionNilPtr + } + *x, err = ParseDaemonBuilderAction(*v) + default: + return errors.New("invalid type for DaemonBuilderAction") + } + + return +} + +// Value implements the driver Valuer interface. +func (x DaemonBuilderAction) Value() (driver.Value, error) { + return x.String(), nil +} + const ( // DatabasePostgresql is a Database of type postgresql. DatabasePostgresql Database = "postgresql"