Skip to content

Commit

Permalink
[DPE-5679] Use jmx_exporter 0.15 to avoid conflict with spark-metrics
Browse files (browse the repository at this point in the history)
  • Loading branch information
welpaolo authored Nov 18, 2024
1 parent cf69d6d commit 4e008ae
Show file tree
Hide file tree
Showing 7 changed files with 28 additions and 12 deletions.
12 changes: 10 additions & 2 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -73,12 +73,21 @@ jobs:
- integration-tls
- integration-azure
- integration-logs
runner:
- ubuntu-22.04
- [self-hosted, linux, AMD64, X64, large, jammy]
exclude:
- tox-environments: integration-azure # Don't test azure twice
runner: [self-hosted, linux, AMD64, X64, large, jammy]
- tox-environments: integration-charm # Don't test charm twice
runner: [self-hosted, linux, AMD64, X64, large, jammy]

name: ${{ matrix.tox-environments }}
needs:
- lint
- unit-test
- build
runs-on: ubuntu-22.04
runs-on: ${{ matrix.runner }}
timeout-minutes: 120
steps:
- name: Checkout
Expand Down Expand Up @@ -135,4 +144,3 @@ jobs:
AZURE_STORAGE_KEY: ${{ secrets.AZURE_STORAGE_KEY }}
CI_PACKED_CHARMS: ${{ needs.build.outputs.charms }}
run: tox run -e ${{ matrix.tox-environments }} -- -m '${{ steps.select-tests.outputs.mark_expression }}'

2 changes: 1 addition & 1 deletion metadata.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ resources:
spark-history-server-image:
type: oci-image
description: OCI image for Spark History Server
upstream-source: ghcr.io/canonical/charmed-spark@sha256:75a01dfca493b5a457fc7d3258daba3e9891f0408f0097f1fd189100d5de4891 # spark-version: 3.4.2 revision: 3a877ab9facaf0a648f529ec1628a3d38c37208e
upstream-source: ghcr.io/canonical/charmed-spark@sha256:1d9949dc7266d814e6483f8d9ffafeff32f66bb9939e0ab29ccfd9d5003a583a # spark-version: 3.4.2 revision: c9d47a39f3c2a214dbe4bd0b88dfcc6c37b651b6

requires:
s3-credentials:
Expand Down
2 changes: 1 addition & 1 deletion src/core/workload.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def jmx_prometheus_javaagent(self):
Used for scraping and exposing mBeans of a JMX target.
"""
return self.lib_path / "jmx_prometheus_javaagent-0.20.0.jar"
return self.lib_path / "jmx_prometheus_javaagent-0.15.0.jar"

@property
def jmx_prometheus_config(self):
Expand Down
10 changes: 6 additions & 4 deletions src/workload.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,15 +45,17 @@ def envs(self):

self._envs = self.from_env(self.read(self.ENV_FILE)) if self.exists(self.ENV_FILE) else {}

self._envs["SPARK_DAEMON_JAVA_OPTS"] = (
f"-javaagent:{self.paths.jmx_prometheus_javaagent}={JMX_EXPORTER_PORT}:{self.paths.jmx_prometheus_config}"
)

return self._envs

@property
def _spark_history_server_layer(self):
"""Return a dictionary representing a Pebble layer."""
self.set_environment(
{
"SPARK_DAEMON_JAVA_OPTS": f"-javaagent:{self.paths.jmx_prometheus_javaagent}={JMX_EXPORTER_PORT}:{self.paths.jmx_prometheus_config}"
}
)

layer = {
"summary": "spark history server layer",
"description": "pebble config layer for spark history server",
Expand Down
6 changes: 5 additions & 1 deletion tests/integration/setup/run_spark_job.sh
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
#!/bin/bash

spark-client.spark-submit --username hello --conf spark.kubernetes.executor.request.cores=0.1 --class org.apache.spark.examples.SparkPi local:///opt/spark/examples/jars/spark-examples_2.12-3.4.2.jar 100
spark-client.spark-submit --username hello --conf spark.kubernetes.executor.request.cores=0.1 --class org.apache.spark.examples.SparkPi local:///opt/spark/examples/jars/spark-examples_2.12-3.4.2.jar 1000
echo "Print logs"
kubectl logs -l spark-version=3.4.2
echo "Kubectl get pods -A"
kubectl get pods -A
3 changes: 2 additions & 1 deletion tests/integration/setup/setup_spark_azure.sh
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ spark-client.service-account-registry create --username hello \
--conf spark.hadoop.fs.azure.account.key.$account_name.dfs.core.windows.net=$secret_key \
--conf spark.eventLog.enabled=true \
--conf spark.eventLog.dir=$folder \
--conf spark.history.fs.logDirectory=$folder
--conf spark.history.fs.logDirectory=$folder \
--conf spark.kubernetes.container.image=ghcr.io/canonical/charmed-spark@sha256:1d9949dc7266d814e6483f8d9ffafeff32f66bb9939e0ab29ccfd9d5003a583a

spark-client.service-account-registry get-config --username hello
5 changes: 3 additions & 2 deletions tests/integration/test_charm_azure.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,13 +174,14 @@ async def test_build_and_deploy(ops_test: OpsTest, charm_versions, azure_credent
apps = json.loads(
urllib.request.urlopen(f"http://{address}:18080/api/v1/applications").read()
)
except Exception:
except Exception as e:
logger.warning(f"Exception e: {e}")
apps = []

if len(apps) > 0:
break
else:
sleep(10)
sleep(30)

assert len(apps) == 1

Expand Down

0 comments on commit 4e008ae

Please sign in to comment.