Update Makefile for building sparkctl (kubeflow#2119)
Signed-off-by: Yi Chen <[email protected]>
ChenYi015 authored Aug 22, 2024
1 parent 8fcda12 commit 4bc6e89
Showing 3 changed files with 107 additions and 103 deletions.
150 changes: 76 additions & 74 deletions Makefile
@@ -13,20 +13,20 @@ SHELL = /usr/bin/env bash -o pipefail
.SHELLFLAGS = -ec

# Version information.
VERSION=$(shell cat VERSION | sed "s/^v//")
BUILD_DATE = $(shell date -u +"%Y-%m-%dT%H:%M:%S%:z")
GIT_COMMIT = $(shell git rev-parse HEAD)
GIT_TAG = $(shell if [ -z "`git status --porcelain`" ]; then git describe --exact-match --tags HEAD 2>/dev/null; fi)
GIT_TREE_STATE = $(shell if [ -z "`git status --porcelain`" ]; then echo "clean" ; else echo "dirty"; fi)
GIT_SHA = $(shell git rev-parse --short HEAD || echo "HEAD")
GIT_VERSION = ${VERSION}-${GIT_SHA}

REPO=github.com/kubeflow/spark-operator
SPARK_OPERATOR_GOPATH=/go/src/github.com/kubeflow/spark-operator
SPARK_OPERATOR_CHART_PATH=charts/spark-operator-chart
DEP_VERSION:=`grep DEP_VERSION= Dockerfile | awk -F\" '{print $$2}'`
BUILDER=`grep "FROM golang:" Dockerfile | awk '{print $$2}'`
UNAME:=`uname | tr '[:upper:]' '[:lower:]'`
VERSION ?= $(shell cat VERSION | sed "s/^v//")
BUILD_DATE := $(shell date -u +"%Y-%m-%dT%H:%M:%S%:z")
GIT_COMMIT := $(shell git rev-parse HEAD)
GIT_TAG := $(shell if [ -z "`git status --porcelain`" ]; then git describe --exact-match --tags HEAD 2>/dev/null; fi)
GIT_TREE_STATE := $(shell if [ -z "`git status --porcelain`" ]; then echo "clean" ; else echo "dirty"; fi)
GIT_SHA := $(shell git rev-parse --short HEAD || echo "HEAD")
GIT_VERSION := ${VERSION}+${GIT_SHA}

REPO := github.com/kubeflow/spark-operator
SPARK_OPERATOR_GOPATH := /go/src/github.com/kubeflow/spark-operator
SPARK_OPERATOR_CHART_PATH := charts/spark-operator-chart
DEP_VERSION := `grep DEP_VERSION= Dockerfile | awk -F\" '{print $$2}'`
BUILDER := `grep "FROM golang:" Dockerfile | awk '{print $$2}'`
UNAME := `uname | tr '[:upper:]' '[:lower:]'`
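
Worth noting for readers less familiar with GNU Make: the hunk above switches these variables from recursive `=` to simply expanded `:=` (each `$(shell ...)` command now runs once, at parse time) and makes `VERSION` conditional with `?=` (so a value supplied on the command line or via the environment is kept). A minimal, stand-alone illustration of the three operators — not taken from this repository's Makefile:

```makefile
# Illustration only -- not part of this commit.
LAZY   = $(shell git rev-parse --short HEAD)   # recursive: git runs again on every reference to $(LAZY)
EAGER := $(shell git rev-parse --short HEAD)   # simple: git runs once, when make parses this line
VER   ?= dev                                   # conditional: used only if VER is not already set (command line / environment wins)
```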

# CONTAINER_TOOL defines the container tool to be used for building images.
# Be aware that the target commands are only tested with Docker which is
@@ -45,8 +45,34 @@ KIND_CLUSTER_NAME ?= spark-operator
KIND_CONFIG_FILE ?= charts/spark-operator-chart/ci/kind-config.yaml
KIND_KUBE_CONFIG ?= $(HOME)/.kube/config

## Location to install binaries
LOCALBIN ?= $(shell pwd)/bin

## Versions
KUSTOMIZE_VERSION ?= v5.4.1
CONTROLLER_TOOLS_VERSION ?= v0.15.0
KIND_VERSION ?= v0.23.0
ENVTEST_VERSION ?= release-0.18
# ENVTEST_K8S_VERSION refers to the version of kubebuilder assets to be downloaded by envtest binary.
ENVTEST_K8S_VERSION = 1.29.3
ENVTEST_K8S_VERSION ?= 1.29.3
GOLANGCI_LINT_VERSION ?= v1.57.2
GEN_CRD_API_REFERENCE_DOCS_VERSION ?= v0.3.0
HELM_VERSION ?= v3.15.3
HELM_UNITTEST_VERSION ?= 0.5.1
HELM_DOCS_VERSION ?= v1.14.2

## Binaries
SPARK_OPERATOR ?= $(LOCALBIN)/spark-operator
SPARKCTL ?= $(LOCALBIN)/sparkctl
KUBECTL ?= kubectl
KUSTOMIZE ?= $(LOCALBIN)/kustomize-$(KUSTOMIZE_VERSION)
CONTROLLER_GEN ?= $(LOCALBIN)/controller-gen-$(CONTROLLER_TOOLS_VERSION)
KIND ?= $(LOCALBIN)/kind-$(KIND_VERSION)
ENVTEST ?= $(LOCALBIN)/setup-envtest-$(ENVTEST_VERSION)
GOLANGCI_LINT ?= $(LOCALBIN)/golangci-lint-$(GOLANGCI_LINT_VERSION)
GEN_CRD_API_REFERENCE_DOCS ?= $(LOCALBIN)/gen-crd-api-reference-docs-$(GEN_CRD_API_REFERENCE_DOCS_VERSION)
HELM ?= $(LOCALBIN)/helm-$(HELM_VERSION)
HELM_DOCS ?= $(LOCALBIN)/helm-docs-$(HELM_DOCS_VERSION)

##@ General

@@ -68,6 +94,12 @@ help: ## Display this help.
.PHONY: version
version: ## Print version information.
@echo "Version: ${VERSION}"
@echo "Build Date: ${BUILD_DATE}"
@echo "Git Commit: ${GIT_COMMIT}"
@echo "Git Tag: ${GIT_TAG}"
@echo "Git Tree State: ${GIT_TREE_STATE}"
@echo "Git SHA: ${GIT_SHA}"
@echo "Git Version: ${GIT_VERSION}"

##@ Development

@@ -83,8 +115,8 @@ generate: controller-gen ## Generate code containing DeepCopy, DeepCopyInto, and
update-crd: manifests ## Update CRD files in the Helm chart.
cp config/crd/bases/* charts/spark-operator-chart/crds/

.PHONY: clean
clean: ## Clean up caches and output.
.PHONY: go-clean
go-clean: ## Clean up caches and output.
@echo "cleaning up caches and output"
go clean -cache -testcache -r -x 2>&1 >/dev/null
-rm -rf _output
@@ -128,43 +160,31 @@ e2e-test: envtest ## Run the e2e tests against a Kind k8s instance that is spun
##@ Build

override LDFLAGS += \
-X ${REPO}.version=v${VERSION} \
-X ${REPO}.version=${GIT_VERSION} \
-X ${REPO}.buildDate=${BUILD_DATE} \
-X ${REPO}.gitCommit=${GIT_COMMIT} \
-X ${REPO}.gitTreeState=${GIT_TREE_STATE} \
-extldflags "-static"

.PHONY: build-operator
build-operator: ## Build Spark operator
go build -o bin/spark-operator -ldflags '${LDFLAGS}' cmd/main.go
build-operator: ## Build Spark operator.
echo "Building spark-operator binary..."
go build -o $(SPARK_OPERATOR) -ldflags '${LDFLAGS}' cmd/main.go

.PHONY: build-sparkctl
build-sparkctl: ## Build sparkctl binary.
[ ! -f "sparkctl/sparkctl-darwin-amd64" ] || [ ! -f "sparkctl/sparkctl-linux-amd64" ] && \
echo building using $(BUILDER) && \
docker run -w $(SPARK_OPERATOR_GOPATH) \
-v $$(pwd):$(SPARK_OPERATOR_GOPATH) $(BUILDER) sh -c \
"apk add --no-cache bash git && \
cd sparkctl && \
bash build.sh" || true
echo "Building sparkctl binary..."
CGO_ENABLED=0 go build -o $(SPARKCTL) -buildvcs=false sparkctl/main.go

.PHONY: install-sparkctl
install-sparkctl: | sparkctl/sparkctl-darwin-amd64 sparkctl/sparkctl-linux-amd64 ## Install sparkctl binary.
@if [ "$(UNAME)" = "linux" ]; then \
echo "installing linux binary to /usr/local/bin/sparkctl"; \
sudo cp sparkctl/sparkctl-linux-amd64 /usr/local/bin/sparkctl; \
sudo chmod +x /usr/local/bin/sparkctl; \
elif [ "$(UNAME)" = "darwin" ]; then \
echo "installing macOS binary to /usr/local/bin/sparkctl"; \
cp sparkctl/sparkctl-darwin-amd64 /usr/local/bin/sparkctl; \
chmod +x /usr/local/bin/sparkctl; \
else \
echo "$(UNAME) not supported"; \
fi
install-sparkctl: build-sparkctl ## Install sparkctl binary.
echo "Installing sparkctl binary to /usr/local/bin..."; \
sudo cp $(SPARKCTL) /usr/local/bin

.PHONY: clean-sparkctl
clean-sparkctl: ## Clean sparkctl binary.
rm -f sparkctl/sparkctl-darwin-amd64 sparkctl/sparkctl-linux-amd64
.PHONY: clean
clean: ## Clean spark-operator and sparkctl binaries.
rm -f $(SPARK_OPERATOR)
rm -f $(SPARKCTL)

.PHONY: build-api-docs
build-api-docs: gen-crd-api-reference-docs ## Build api documentation.
@@ -202,12 +222,12 @@ docker-buildx: ## Build and push docker image for the operator for cross-platfor
##@ Helm

.PHONY: detect-crds-drift
detect-crds-drift:
diff -q charts/spark-operator-chart/crds config/crd/bases
detect-crds-drift: manifests ## Detect CRD drift.
diff -q $(SPARK_OPERATOR_CHART_PATH)/crds config/crd/bases

.PHONY: helm-unittest
helm-unittest: helm-unittest-plugin ## Run Helm chart unittests.
helm unittest charts/spark-operator-chart --strict --file "tests/**/*_test.yaml"
$(HELM) unittest $(SPARK_OPERATOR_CHART_PATH) --strict --file "tests/**/*_test.yaml"

.PHONY: helm-lint
helm-lint: ## Run Helm chart lint test.
@@ -231,7 +251,7 @@ kind-create-cluster: kind ## Create a kind cluster for integration tests.

.PHONY: kind-load-image
kind-load-image: kind-create-cluster docker-build ## Load the image into the kind cluster.
kind load docker-image --name $(KIND_CLUSTER_NAME) $(IMAGE)
$(KIND) load docker-image --name $(KIND_CLUSTER_NAME) $(IMAGE)

.PHONY: kind-delete-custer
kind-delete-custer: kind ## Delete the created kind cluster.
@@ -257,33 +277,9 @@ undeploy: kustomize ## Undeploy controller from the K8s cluster specified in ~/.

##@ Dependencies

## Location to install dependencies to
LOCALBIN ?= $(shell pwd)/bin
$(LOCALBIN):
mkdir -p $(LOCALBIN)

## Tool Binaries
KUBECTL ?= kubectl
KUSTOMIZE ?= $(LOCALBIN)/kustomize-$(KUSTOMIZE_VERSION)
CONTROLLER_GEN ?= $(LOCALBIN)/controller-gen-$(CONTROLLER_TOOLS_VERSION)
KIND ?= $(LOCALBIN)/kind-$(KIND_VERSION)
ENVTEST ?= $(LOCALBIN)/setup-envtest-$(ENVTEST_VERSION)
GOLANGCI_LINT = $(LOCALBIN)/golangci-lint-$(GOLANGCI_LINT_VERSION)
GEN_CRD_API_REFERENCE_DOCS ?= $(LOCALBIN)/gen-crd-api-reference-docs-$(GEN_CRD_API_REFERENCE_DOCS_VERSION)
HELM ?= helm
HELM_UNITTEST ?= unittest
HELM_DOCS ?= $(LOCALBIN)/helm-docs-$(HELM_DOCS_VERSION)

## Tool Versions
KUSTOMIZE_VERSION ?= v5.4.1
CONTROLLER_TOOLS_VERSION ?= v0.15.0
KIND_VERSION ?= v0.23.0
ENVTEST_VERSION ?= release-0.18
GOLANGCI_LINT_VERSION ?= v1.57.2
GEN_CRD_API_REFERENCE_DOCS_VERSION ?= v0.3.0
HELM_UNITTEST_VERSION ?= 0.5.1
HELM_DOCS_VERSION ?= v1.14.2

.PHONY: kustomize
kustomize: $(KUSTOMIZE) ## Download kustomize locally if necessary.
$(KUSTOMIZE): $(LOCALBIN)
@@ -314,15 +310,21 @@ gen-crd-api-reference-docs: $(GEN_CRD_API_REFERENCE_DOCS) ## Download gen-crd-ap
$(GEN_CRD_API_REFERENCE_DOCS): $(LOCALBIN)
$(call go-install-tool,$(GEN_CRD_API_REFERENCE_DOCS),github.com/ahmetb/gen-crd-api-reference-docs,$(GEN_CRD_API_REFERENCE_DOCS_VERSION))

.PHONY: helm
helm: $(HELM) ## Download helm locally if necessary.
$(HELM): $(LOCALBIN)
$(call go-install-tool,$(HELM),helm.sh/helm/v3/cmd/helm,$(HELM_VERSION))

.PHONY: helm-unittest-plugin
helm-unittest-plugin: ## Download helm unittest plugin locally if necessary.
if [ -z "$(shell helm plugin list | grep unittest)" ]; then \
helm-unittest-plugin: helm ## Download helm unittest plugin locally if necessary.
if [ -z "$(shell $(HELM) plugin list | grep unittest)" ]; then \
echo "Installing helm unittest plugin"; \
helm plugin install https://github.com/helm-unittest/helm-unittest.git --version $(HELM_UNITTEST_VERSION); \
$(HELM) plugin install https://github.com/helm-unittest/helm-unittest.git --version $(HELM_UNITTEST_VERSION); \
fi

.PHONY: helm-docs-plugin
helm-docs-plugin: ## Download helm-docs plugin locally if necessary.
helm-docs-plugin: $(HELM_DOCS) ## Download helm-docs plugin locally if necessary.
$(HELM_DOCS): $(LOCALBIN)
$(call go-install-tool,$(HELM_DOCS),github.com/norwoodj/helm-docs/cmd/helm-docs,$(HELM_DOCS_VERSION))

# go-install-tool will 'go install' any package with custom target and name of binary, if it doesn't exist
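
The body of the `go-install-tool` helper is collapsed in this diff view. For orientation, a typical kubebuilder-style definition looks roughly like the sketch below; this is an assumption about the general pattern, not the exact text of this repository's Makefile:

```makefile
# Hypothetical sketch of a go-install-tool helper (parameter names match the call sites above).
# $1 = target path of the binary, already suffixed with its version (e.g. bin/kind-v0.23.0)
# $2 = Go package to install
# $3 = version to install
define go-install-tool
@[ -f "$(1)" ] || { \
  set -e ;\
  echo "Downloading $(2)@$(3)" ;\
  GOBIN=$(LOCALBIN) go install $(2)@$(3) ;\
  mv "$(LOCALBIN)/$$(basename $(2))" "$(1)" ;\
}
endef
```
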
33 changes: 31 additions & 2 deletions sparkctl/README.md
@@ -2,10 +2,39 @@

`sparkctl` is a command-line tool for the Spark Operator that creates, lists, checks the status of, fetches logs of, and deletes `SparkApplication`s. It can also forward a local port to the Spark web UI port on the driver so that the UI can be accessed locally. Each function is implemented as a sub-command of `sparkctl`.

To build `sparkctl`, make sure you followed build steps [here](https://github.com/kubeflow/spark-operator/blob/master/docs/developer-guide.md#build-the-operator) and have all the dependencies, then run the following command from within `sparkctl/`:
To build the `sparkctl` binary, run the following command in the root directory of the project:

```bash
go build -o sparkctl
make build-sparkctl
```

Then the `sparkctl` binary can be found in the `bin` directory:

```bash
$ bin/sparkctl --help
sparkctl is the command-line tool for working with the Spark Operator. It supports creating, deleting and
checking status of SparkApplication objects. It also supports fetching application logs.

Usage:
sparkctl [command]

Available Commands:
completion Generate the autocompletion script for the specified shell
create Create a SparkApplication object
delete Delete a SparkApplication object
event Shows SparkApplication events
forward Start to forward a local port to the remote port of the driver UI
help Help about any command
list List SparkApplication objects
log log is a sub-command of sparkctl that fetches logs of a Spark application.
status Check status of a SparkApplication

Flags:
-h, --help help for sparkctl
-k, --kubeconfig string The path to the local Kubernetes configuration file (default "/Users/chenyi/.kube/config")
-n, --namespace string The namespace in which the SparkApplication is to be created (default "default")

Use "sparkctl [command] --help" for more information about a command.
```
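
A typical session with the built binary might look like the following; the command names are taken from the help text above, while the manifest path and application name are illustrative placeholders:

```bash
# Submit a SparkApplication from a local manifest (path is illustrative).
bin/sparkctl create path/to/spark-pi.yaml --namespace default

# Inspect it.
bin/sparkctl list --namespace default
bin/sparkctl status spark-pi --namespace default
bin/sparkctl event spark-pi --namespace default

# Fetch driver logs and forward the Spark web UI to a local port.
bin/sparkctl log spark-pi --namespace default
bin/sparkctl forward spark-pi --namespace default

# Clean up when finished.
bin/sparkctl delete spark-pi --namespace default
```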

## Flags
27 changes: 0 additions & 27 deletions sparkctl/build.sh

This file was deleted.
