[Benchmark] Add mechanism to retrieve the logs from test-execution.json (#4686)

Signed-off-by: Divya Madala <[email protected]>
Divyaasm authored May 9, 2024
1 parent a9cf94d commit 95f99ea
Showing 8 changed files with 368 additions and 133 deletions.
1 change: 1 addition & 0 deletions Pipfile
@@ -41,6 +41,7 @@ ruamel-yaml = "~=0.17.21"
markdownify = "~=0.12.1"
mistune = "~=3.0.1"
semver = ">=3,<4"
pandas = "~=2.2.2"

[dev-packages]

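The only runtime dependency added is pandas, pinned with a compatible-release specifier; it is used further down in benchmark_test_suite.py to flatten the benchmark results into a CSV. A minimal sketch (assuming standard PEP 440 semantics and that the packaging library is installed) of which versions the "~=2.2.2" pin accepts:

# Illustration only: the "~=" (compatible release) operator accepts any 2.2.x
# release at or above 2.2.2, but rejects 2.3.0 and later.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet("~=2.2.2")
print("2.2.2" in spec)  # True
print("2.2.9" in spec)  # True
print("2.3.0" in spec)  # False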
132 changes: 110 additions & 22 deletions Pipfile.lock

Some generated files are not rendered by default.

5 changes: 5 additions & 0 deletions jenkins/opensearch/benchmark-test-endpoint.jenkinsfile
@@ -121,6 +121,7 @@ parameters {
password: PASSWORD,
workload: TEST_WORKLOAD,
userTag: USER_TAGS.isEmpty() ? "security-enabled:${SECURITY_ENABLED}" : "${USER_TAGS},security-enabled:${SECURITY_ENABLED}",
suffix: "${BUILD_NUMBER}",
workloadParams: WORKLOAD_PARAMS,
testProcedure: TEST_PROCEDURE,
excludeTasks: EXCLUDE_TASKS,
@@ -129,10 +130,14 @@ parameters {
captureSegmentReplicationStat: CAPTURE_SEGMENT_REPLICATION_STAT,
telemetryParams: TELEMETRY_PARAMS
)
stash includes: 'test_execution*.csv', name: "benchmark"

}
}
post {
always {
unstash "benchmark"
archiveArtifacts artifacts: 'test_execution*.csv'
postCleanup()
}
}
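In the Jenkinsfile, the benchmark stage now forwards the Jenkins BUILD_NUMBER as suffix, stashes any test_execution*.csv produced during the run, and the post { always { ... } } block unstashes and archives those CSVs before cleanup. The suffix becomes the stack_suffix used below to name both the Docker container and the CSV file, which is what makes the stash pattern match. A small sketch (illustration only; the build number shown is hypothetical):

from fnmatch import fnmatch

stack_suffix = "307"  # hypothetical BUILD_NUMBER forwarded as `suffix`
csv_name = f"test_execution_{stack_suffix}.csv"  # file name written by convert() below
# The include pattern used by both stash and archiveArtifacts in the Jenkinsfile:
assert fnmatch(csv_name, "test_execution*.csv")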
35 changes: 32 additions & 3 deletions src/test_workflow/benchmark_test/benchmark_test_suite.py
@@ -5,11 +5,17 @@
# this file be licensed under the Apache-2.0 license or a
# compatible open source license.

import glob
import json
import logging
import os
import shutil
import subprocess
from typing import Any

import pandas as pd

from system.temporary_directory import TemporaryDirectory
from test_workflow.benchmark_test.benchmark_args import BenchmarkArgs


@@ -37,7 +43,7 @@ def __init__(
self.password = password

# Pass the cluster endpoints with -t for multi-cluster use cases(e.g. cross-cluster-replication)
self.command = 'docker run --rm'
self.command = f'docker run --name docker-container-{self.args.stack_suffix}'
if self.args.benchmark_config:
self.command += f" -v {args.benchmark_config}:/opensearch-benchmark/.benchmark/benchmark.ini"
self.command += f" opensearchproject/opensearch-benchmark:latest execute-test --workload={self.args.workload} " \
@@ -67,11 +73,34 @@ def __init__(
if self.args.telemetry_params:
self.command += f" --telemetry-params '{self.args.telemetry_params}'"

def execute(self) -> None:
if self.security:
self.command += f' --client-options="timeout:300,use_ssl:true,verify_certs:false,basic_auth_user:\'{self.args.username}\',basic_auth_password:\'{self.password}\'"'
else:
self.command += ' --client-options="timeout:300"'

def execute(self) -> None:
log_info = f"Executing {self.command.replace(self.endpoint, len(self.endpoint) * '*').replace(self.args.username, len(self.args.username) * '*')}"
logging.info(log_info.replace(self.password, len(self.password) * '*') if self.password else log_info)
subprocess.check_call(f"{self.command}", cwd=os.getcwd(), shell=True)
try:
subprocess.check_call(f"{self.command}", cwd=os.getcwd(), shell=True)
if self.args.cluster_endpoint:
self.convert()
finally:
self.cleanup()

def convert(self) -> None:
with TemporaryDirectory() as work_dir:
subprocess.check_call(f"docker cp docker-container-{self.args.stack_suffix}:opensearch-benchmark/. {str(work_dir.path)}", cwd=os.getcwd(), shell=True)
file_path = glob.glob(os.path.join(str(work_dir.path), "test_executions", "*", "test_execution.json"))
with open(file_path[0]) as file:
data = json.load(file)
formatted_data = pd.json_normalize(data["results"]["op_metrics"])
formatted_data.to_csv(os.path.join(os.getcwd(), f"test_execution_{self.args.stack_suffix}.csv"), index=False)
df = pd.read_csv(os.path.join(os.getcwd(), f"test_execution_{self.args.stack_suffix}.csv"))
pd.set_option('display.width', int(2 * shutil.get_terminal_size().columns))
pd.set_option('display.max_rows', None)
pd.set_option('display.max_columns', None)
logging.info(f"\n{df}")

def cleanup(self) -> None:
subprocess.check_call(f"docker rm docker-container-{self.args.stack_suffix}", cwd=os.getcwd(), shell=True)