Commit 7c5eb48
Merge branch '126941/optimize-percentiles-fetching' of github.com:andrewctate/kibana into 126941/optimize-percentiles-fetching
drewdaemon committed Jun 14, 2022
2 parents 78bfdce + edd4061 commit 7c5eb48
Showing 439 changed files with 11,146 additions and 6,336 deletions.
3 changes: 2 additions & 1 deletion .buildkite/pull_requests.json
@@ -36,7 +36,8 @@
"always_require_ci_on_changed": [
"^docs/developer/plugin-list.asciidoc$",
"/plugins/[^/]+/readme\\.(md|asciidoc)$"
]
],
"kibana_versions_check": true
}
]
}
16 changes: 8 additions & 8 deletions .buildkite/scripts/steps/artifacts/cloud.sh
@@ -56,8 +56,8 @@ CLOUD_DEPLOYMENT_PASSWORD=$(jq -r --slurp '.[]|select(.resources).resources[] |
CLOUD_DEPLOYMENT_ID=$(jq -r --slurp '.[0].id' "$LOGS")
CLOUD_DEPLOYMENT_STATUS_MESSAGES=$(jq --slurp '[.[]|select(.resources == null)]' "$LOGS")

CLOUD_DEPLOYMENT_KIBANA_URL=$(ecctl deployment show "$CLOUD_DEPLOYMENT_ID" | jq -r '.resources.kibana[0].info.metadata.aliased_url')
CLOUD_DEPLOYMENT_ELASTICSEARCH_URL=$(ecctl deployment show "$CLOUD_DEPLOYMENT_ID" | jq -r '.resources.elasticsearch[0].info.metadata.aliased_url')
export CLOUD_DEPLOYMENT_KIBANA_URL=$(ecctl deployment show "$CLOUD_DEPLOYMENT_ID" | jq -r '.resources.kibana[0].info.metadata.aliased_url')
export CLOUD_DEPLOYMENT_ELASTICSEARCH_URL=$(ecctl deployment show "$CLOUD_DEPLOYMENT_ID" | jq -r '.resources.elasticsearch[0].info.metadata.aliased_url')

echo "Kibana: $CLOUD_DEPLOYMENT_KIBANA_URL"
echo "ES: $CLOUD_DEPLOYMENT_ELASTICSEARCH_URL"
@@ -68,15 +68,15 @@ function shutdown {
}
trap "shutdown" EXIT

export TEST_KIBANA_PROTOCOL=$(node -e "console.log(new URL('$CLOUD_DEPLOYMENT_KIBANA_URL').protocol.replace(':', ''))")
export TEST_KIBANA_HOSTNAME=$(node -e "console.log(new URL('$CLOUD_DEPLOYMENT_KIBANA_URL').hostname)")
export TEST_KIBANA_PORT=$(node -e "console.log(new URL('$CLOUD_DEPLOYMENT_KIBANA_URL').port)")
export TEST_KIBANA_PROTOCOL=$(node -e "console.log(new URL(process.env.CLOUD_DEPLOYMENT_KIBANA_URL).protocol.replace(':', ''))")
export TEST_KIBANA_HOSTNAME=$(node -e "console.log(new URL(process.env.CLOUD_DEPLOYMENT_KIBANA_URL).hostname)")
export TEST_KIBANA_PORT=$(node -e "console.log(new URL(process.env.CLOUD_DEPLOYMENT_KIBANA_URL).port || 443)")
export TEST_KIBANA_USERNAME="$CLOUD_DEPLOYMENT_USERNAME"
export TEST_KIBANA_PASSWORD="$CLOUD_DEPLOYMENT_PASSWORD"

export TEST_ES_PROTOCOL=$(node -e "console.log(new URL('$CLOUD_DEPLOYMENT_ELASTICSEARCH_URL').protocol.replace(':', ''))")
export TEST_ES_HOSTNAME=$(node -e "console.log(new URL('$CLOUD_DEPLOYMENT_ELASTICSEARCH_URL').hostname)")
export TEST_ES_PORT=$(node -e "console.log(new URL('$CLOUD_DEPLOYMENT_ELASTICSEARCH_URL').port)")
export TEST_ES_PROTOCOL=$(node -e "console.log(new URL(process.env.CLOUD_DEPLOYMENT_ELASTICSEARCH_URL).protocol.replace(':', ''))")
export TEST_ES_HOSTNAME=$(node -e "console.log(new URL(process.env.CLOUD_DEPLOYMENT_ELASTICSEARCH_URL).hostname)")
export TEST_ES_PORT=$(node -e "console.log(new URL(process.env.CLOUD_DEPLOYMENT_ELASTICSEARCH_URL).port || 443)")
export TEST_ES_USERNAME="$CLOUD_DEPLOYMENT_USERNAME"
export TEST_ES_PASSWORD="$CLOUD_DEPLOYMENT_PASSWORD"

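Note on the cloud.sh change above: the deployment URLs are now exported and read inside `node -e` via `process.env` instead of being spliced into the JavaScript as shell-interpolated string literals, and the port falls back to 443 when the URL carries no explicit port. A minimal sketch of the difference, using a made-up deployment URL:

#!/usr/bin/env bash
set -euo pipefail

# Hypothetical value, for illustration only.
export CLOUD_DEPLOYMENT_KIBANA_URL="https://my-deployment.kb.us-east-1.aws.found.io"

# Old pattern: the value is pasted into the JavaScript source, so any quote or
# backslash in it would break the one-liner.
node -e "console.log(new URL('$CLOUD_DEPLOYMENT_KIBANA_URL').hostname)"

# New pattern: node reads the exported variable itself; the value never passes
# through the shell/JS quoting layers.
node -e "console.log(new URL(process.env.CLOUD_DEPLOYMENT_KIBANA_URL).hostname)"

# URL#port is an empty string when the scheme's default port is implied,
# hence the "|| 443" fallback added in the diff.
node -e "console.log(new URL(process.env.CLOUD_DEPLOYMENT_KIBANA_URL).port || 443)"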
2 changes: 2 additions & 0 deletions .buildkite/scripts/steps/artifacts/docker_context.sh
@@ -26,5 +26,7 @@ fi
tar -xf "target/$DOCKER_CONTEXT_FILE" -C "$DOCKER_BUILD_FOLDER"
cd $DOCKER_BUILD_FOLDER

buildkite-agent artifact download "kibana-$FULL_VERSION-linux-x86_64.tar.gz" . --build "${KIBANA_BUILD_ID:-$BUILDKITE_BUILD_ID}"

echo "--- Build context"
docker build .
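The added download step uses Bash's `${KIBANA_BUILD_ID:-$BUILDKITE_BUILD_ID}` expansion to decide which build's tarball to pull: an explicitly provided build ID wins, otherwise the current build is used. A tiny sketch of that fallback (the IDs are made up):

#!/usr/bin/env bash
set -euo pipefail

BUILDKITE_BUILD_ID="current-build-123"

# Unset (or empty) KIBANA_BUILD_ID falls back to the current build.
echo "${KIBANA_BUILD_ID:-$BUILDKITE_BUILD_ID}"    # -> current-build-123

# A provided KIBANA_BUILD_ID (e.g. reusing artifacts from an earlier run) wins.
KIBANA_BUILD_ID="reused-build-456"
echo "${KIBANA_BUILD_ID:-$BUILDKITE_BUILD_ID}"    # -> reused-build-456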
2 changes: 1 addition & 1 deletion .buildkite/scripts/steps/code_coverage/ftr_configs.sh
@@ -116,7 +116,7 @@ printf "%s\n" "${results[@]}"
echo ""

# So the last step "knows" this config ran
uploadRanFile "ftr_configs"
uploadRanFile "functional"

# Force exit 0 to ensure the next build step starts.
exit 0
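The marker name written here changes from "ftr_configs" to "functional" so that it matches the coverage directory names (target/kibana-coverage/functional, target/kibana-coverage/jest) that the reworked ingest step below keys on. `uploadRanFile` itself is not part of this diff; the sketch below only illustrates the assumed contract:

#!/usr/bin/env bash
set -euo pipefail

# Assumed shape of the helper (not shown in this commit): record which coverage
# flavor this CI step produced as a one-line marker file and upload it, so the
# final ingest job can discover which flavors actually ran.
uploadRanFile() {
  local flavor=$1                      # e.g. "jest" or "functional"
  mkdir -p target/ran_files
  echo "$flavor" >"target/ran_files/${flavor}.txt"
  buildkite-agent artifact upload "target/ran_files/${flavor}.txt"
}

uploadRanFile "functional"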
138 changes: 91 additions & 47 deletions .buildkite/scripts/steps/code_coverage/ingest.sh
@@ -8,59 +8,103 @@ source .buildkite/scripts/steps/code_coverage/merge.sh

export CODE_COVERAGE=1
echo "--- Reading Kibana stats cluster creds from vault"
export USER_FROM_VAULT="$(retry 5 5 vault read -field=username secret/kibana-issues/prod/coverage/elasticsearch)"
export PASS_FROM_VAULT="$(retry 5 5 vault read -field=password secret/kibana-issues/prod/coverage/elasticsearch)"
export HOST_FROM_VAULT="$(retry 5 5 vault read -field=host secret/kibana-issues/prod/coverage/elasticsearch)"
export TIME_STAMP=$(date +"%Y-%m-%dT%H:%M:00Z")

echo "--- Print KIBANA_DIR"
echo "### KIBANA_DIR: $KIBANA_DIR"
USER_FROM_VAULT="$(retry 5 5 vault read -field=username secret/kibana-issues/prod/coverage/elasticsearch)"
export USER_FROM_VAULT
PASS_FROM_VAULT="$(retry 5 5 vault read -field=password secret/kibana-issues/prod/coverage/elasticsearch)"
export PASS_FROM_VAULT
HOST_FROM_VAULT="$(retry 5 5 vault read -field=host secret/kibana-issues/prod/coverage/elasticsearch)"
export HOST_FROM_VAULT
TIME_STAMP=$(date +"%Y-%m-%dT%H:%M:00Z")
export TIME_STAMP

echo "--- Download previous git sha"
.buildkite/scripts/steps/code_coverage/reporting/downloadPrevSha.sh
previousSha=$(cat downloaded_previous.txt)
PREVIOUS_SHA=$(cat downloaded_previous.txt)

echo "--- Upload new git sha"
.buildkite/scripts/steps/code_coverage/reporting/uploadPrevSha.sh

.buildkite/scripts/bootstrap.sh

echo "--- Download coverage artifacts"
buildkite-agent artifact download target/kibana-coverage/jest/* .
#buildkite-agent artifact download target/kibana-coverage/functional/* .
buildkite-agent artifact download target/ran_files/* .
ls -l target/ran_files/* || echo "### No ran-files found"

echo "--- process HTML Links"
.buildkite/scripts/steps/code_coverage/reporting/prokLinks.sh

echo "--- collect VCS Info"
.buildkite/scripts/steps/code_coverage/reporting/collectVcsInfo.sh

echo "--- Jest: Reset file paths prefix, merge coverage files, and generate the final combined report"
# Jest: Reset file paths prefix to Kibana Dir of final worker
replacePaths "$KIBANA_DIR/target/kibana-coverage/jest" "CC_REPLACEMENT_ANCHOR" "$KIBANA_DIR"
yarn nyc report --nycrc-path src/dev/code_coverage/nyc_config/nyc.jest.config.js

#echo "--- Functional: Reset file paths prefix, merge coverage files, and generate the final combined report"
# Functional: Reset file paths prefix to Kibana Dir of final worker
#set +e
#sed -ie "s|CC_REPLACEMENT_ANCHOR|${KIBANA_DIR}|g" target/kibana-coverage/functional/*.json
#echo "--- Begin Split and Merge for Functional"
#splitCoverage target/kibana-coverage/functional
#splitMerge
#set -e

echo "--- Archive and upload combined reports"
collectAndUpload target/kibana-coverage/jest/kibana-jest-coverage.tar.gz \
target/kibana-coverage/jest-combined
#collectAndUpload target/kibana-coverage/functional/kibana-functional-coverage.tar.gz \
# target/kibana-coverage/functional-combined

echo "--- Upload coverage static site"
.buildkite/scripts/steps/code_coverage/reporting/uploadStaticSite.sh

echo "--- Ingest results to Kibana stats cluster"
.buildkite/scripts/steps/code_coverage/reporting/ingestData.sh 'elastic+kibana+code-coverage' \
${BUILDKITE_BUILD_NUMBER} ${BUILDKITE_BUILD_URL} ${previousSha} \
'src/dev/code_coverage/ingest_coverage/team_assignment/team_assignments.txt'
collectRan() {
buildkite-agent artifact download target/ran_files/* .

while read -r x; do
ran=("${ran[@]}" "$(cat "$x")")
done <<<"$(find target/ran_files -maxdepth 1 -type f -name '*.txt')"

echo "--- Collected Ran files: ${ran[*]}"
}

uniqueifyRanConfigs() {
local xs=("$@")
local xss
xss=$(printf "%s\n" "${xs[@]}" | sort -u | tr '\n' ' ' | xargs) # xargs trims whitespace
uniqRanConfigs=("$xss")
echo "--- Uniq Ran files: ${uniqRanConfigs[*]}"
}

fetchArtifacts() {
echo "--- Fetch coverage artifacts"

local xs=("$@")
for x in "${xs[@]}"; do
buildkite-agent artifact download "target/kibana-coverage/${x}/*" .
done
}

archiveReports() {
echo "--- Archive and upload combined reports"

local xs=("$@")
for x in "${xs[@]}"; do
echo "### Collect and Upload for: ${x}"
# fileHeads "target/file-heads-archive-reports-for-${x}.txt" "target/kibana-coverage/${x}"
# dirListing "target/dir-listing-${x}-combined-during-archiveReports.txt" target/kibana-coverage/${x}-combined
# dirListing "target/dir-listing-${x}-during-archiveReports.txt" target/kibana-coverage/${x}
collectAndUpload "target/kibana-coverage/${x}/kibana-${x}-coverage.tar.gz" "target/kibana-coverage/${x}-combined"
done
}

mergeAll() {
local xs=("$@")

for x in "${xs[@]}"; do
if [ "$x" == "jest" ]; then
echo "--- [$x]: Reset file paths prefix, merge coverage files, and generate the final combined report"
replacePaths "$KIBANA_DIR/target/kibana-coverage/jest" "CC_REPLACEMENT_ANCHOR" "$KIBANA_DIR"
yarn nyc report --nycrc-path src/dev/code_coverage/nyc_config/nyc.jest.config.js
elif [ "$x" == "functional" ]; then
echo "---[$x] : Reset file paths prefix, merge coverage files, and generate the final combined report"
set +e
sed -ie "s|CC_REPLACEMENT_ANCHOR|${KIBANA_DIR}|g" target/kibana-coverage/functional/*.json
echo "--- Begin Split and Merge for Functional"
splitCoverage target/kibana-coverage/functional
splitMerge
set -e
fi
done
}

modularize() {
collectRan
if [ -d target/ran_files ]; then
uniqueifyRanConfigs "${ran[@]}"
fetchArtifacts "${uniqRanConfigs[@]}"
mergeAll "${uniqRanConfigs[@]}"
archiveReports "${uniqRanConfigs[@]}"
.buildkite/scripts/steps/code_coverage/reporting/prokLinks.sh "${uniqRanConfigs[@]}"
.buildkite/scripts/steps/code_coverage/reporting/uploadStaticSite.sh "${uniqRanConfigs[@]}"
.buildkite/scripts/steps/code_coverage/reporting/collectVcsInfo.sh
source .buildkite/scripts/steps/code_coverage/reporting/ingestData.sh 'elastic+kibana+code-coverage' \
"${BUILDKITE_BUILD_NUMBER}" "${BUILDKITE_BUILD_URL}" "${PREVIOUS_SHA}" \
'src/dev/code_coverage/ingest_coverage/team_assignment/team_assignments.txt'
ingestModular "${uniqRanConfigs[@]}"
else
echo "--- Found zero configs that ran, cancelling ingestion."
exit 11
fi
}

modularize
echo "### unique ran configs: ${uniqRanConfigs[*]}"
2 changes: 1 addition & 1 deletion .buildkite/scripts/steps/code_coverage/jest_integration.sh
@@ -15,4 +15,4 @@ echo '--- Jest Integration code coverage'
.buildkite/scripts/steps/code_coverage/jest_parallel.sh jest.integration.config.js

# So the last step "knows" this config ran
uploadRanFile "jest_integration"
uploadRanFile "jest"
@@ -2,6 +2,8 @@

set -euo pipefail

echo "--- collect VCS Info"

echo "### Prok'd Index File: ..."
cat src/dev/code_coverage/www/index.html

@@ -27,4 +29,4 @@ for X in "${!XS[@]}"; do
}
done
echo "### VCS_INFO:"
cat VCS_INFO.txt
cat VCS_INFO.txt
49 changes: 22 additions & 27 deletions .buildkite/scripts/steps/code_coverage/reporting/ingestData.sh
@@ -2,9 +2,6 @@

set -euo pipefail

echo "### Ingesting Code Coverage"
echo ""

COVERAGE_JOB_NAME=$1
export COVERAGE_JOB_NAME
echo "### debug COVERAGE_JOB_NAME: ${COVERAGE_JOB_NAME}"
@@ -31,27 +28,25 @@ echo "### debug TEAM_ASSIGN_PATH: ${TEAM_ASSIGN_PATH}"

BUFFER_SIZE=500
export BUFFER_SIZE
echo "### debug BUFFER_SIZE: ${BUFFER_SIZE}"

# Build team assignments file
echo "### Generate Team Assignments"
CI_STATS_DISABLED=true node scripts/generate_team_assignments.js \
--verbose --src '.github/CODEOWNERS' --dest $TEAM_ASSIGN_PATH

#for x in functional jest; do
# echo "### Ingesting coverage for ${x}"
# COVERAGE_SUMMARY_FILE="target/kibana-coverage/${x}-combined/coverage-summary.json"
#
# CI_STATS_DISABLED=true node scripts/ingest_coverage.js --path ${COVERAGE_SUMMARY_FILE} \
# --vcsInfoPath ./VCS_INFO.txt --teamAssignmentsPath $TEAM_ASSIGN_PATH &
#done
#wait

echo "### Ingesting coverage for JEST"
COVERAGE_SUMMARY_FILE="target/kibana-coverage/jest-combined/coverage-summary.json"

CI_STATS_DISABLED=true node scripts/ingest_coverage.js --path ${COVERAGE_SUMMARY_FILE} \
--vcsInfoPath ./VCS_INFO.txt --teamAssignmentsPath $TEAM_ASSIGN_PATH

echo "--- Ingesting Code Coverage - Complete"
echo ""

ingestModular() {
local xs=("$@")

echo "--- Generate Team Assignments"
CI_STATS_DISABLED=true node scripts/generate_team_assignments.js \
--verbose --src '.github/CODEOWNERS' --dest "$TEAM_ASSIGN_PATH"

echo "--- Ingest results to Kibana stats cluster"
for x in "${xs[@]}"; do
echo "--- Ingesting coverage for ${x}"

COVERAGE_SUMMARY_FILE="target/kibana-coverage/${x}-combined/coverage-summary.json"

CI_STATS_DISABLED=true node scripts/ingest_coverage.js --path "${COVERAGE_SUMMARY_FILE}" \
--vcsInfoPath ./VCS_INFO.txt --teamAssignmentsPath "$TEAM_ASSIGN_PATH" &
done
wait

echo "--- Ingesting Code Coverage - Complete"
echo ""
}
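ingestData.sh no longer runs at the top level; it now defines `ingestModular`, which the ingest step sources and then calls with the list of coverage flavors. Each flavor's `ingest_coverage.js` run is launched in the background with `&` and the function waits for all of them, so the uploads proceed in parallel. A toy sketch of that fan-out/fan-in shape (the worker function here is a stand-in, not the real ingest script):

#!/usr/bin/env bash
set -euo pipefail

# Stand-in for "node scripts/ingest_coverage.js ..." used only to show the pattern.
ingestOne() {
  echo "--- Ingesting coverage for $1"
  sleep 1
  echo "--- $1 done"
}

for flavor in jest functional; do
  ingestOne "$flavor" &     # fan out: one background job per flavor
done
wait                        # fan in: block until every background job finishes
echo "--- Ingesting Code Coverage - Complete"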
16 changes: 14 additions & 2 deletions .buildkite/scripts/steps/code_coverage/reporting/prokLinks.sh
@@ -2,8 +2,20 @@

set -euo pipefail

cat << EOF > src/dev/code_coverage/www/index_partial_2.html
<a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html">Latest Jest</a>
echo "--- process HTML Links"

xs=("$@")
len=${#xs[@]}

# TODO-TRE: Maybe use more exhaustive logic instead of just length.
if [[ $len -eq 2 ]]; then
links="<a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html">Latest Jest</a><a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/functional-combined/index.html">Latest FTR</a>"
else
links="<a class="nav-link" href="https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html">Latest Jest</a>"
fi

cat <<EOF >src/dev/code_coverage/www/index_partial_2.html
${links}
</nav>
</div>
</header>
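One quoting subtlety in the new `links` assignment: the double quotes around the HTML attribute values close and reopen the surrounding shell string, so the shell consumes them and the generated anchors end up with unquoted attribute values (harmless here, since the values contain no spaces). A sketch of an equivalent assignment that escapes the inner quotes, shown as an alternative rather than what was committed:

#!/usr/bin/env bash
set -euo pipefail

TIME_STAMP="2022-06-14T12:00:00Z"   # example value; the real script exports this earlier

links="<a class=\"nav-link\" href=\"https://kibana-coverage.elastic.dev/${TIME_STAMP}/jest-combined/index.html\">Latest Jest</a>"
echo "$links"
# -> <a class="nav-link" href="https://kibana-coverage.elastic.dev/2022-06-14T12:00:00Z/jest-combined/index.html">Latest Jest</a>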
@@ -2,19 +2,22 @@

set -euo pipefail

xs=("$@")

uploadPrefix="gs://elastic-bekitzur-kibana-coverage-live/"
uploadPrefixWithTimeStamp="${uploadPrefix}${TIME_STAMP}/"

cat src/dev/code_coverage/www/index.html

for x in 'src/dev/code_coverage/www/index.html' 'src/dev/code_coverage/www/404.html'; do
gsutil -m -q cp -r -a public-read -z js,css,html ${x} ${uploadPrefix}
done
uploadBase() {
for x in 'src/dev/code_coverage/www/index.html' 'src/dev/code_coverage/www/404.html'; do
gsutil -m -q cp -r -a public-read -z js,css,html "${x}" "${uploadPrefix}"
done
}

#gsutil -m -q cp -r -a public-read -z js,css,html ${x} ${uploadPrefixWithTimeStamp}
#
#for x in 'target/kibana-coverage/functional-combined' 'target/kibana-coverage/jest-combined'; do
# gsutil -m -q cp -r -a public-read -z js,css,html ${x} ${uploadPrefixWithTimeStamp}
#done
uploadRest() {
for x in "${xs[@]}"; do
gsutil -m -q cp -r -a public-read -z js,css,html "target/kibana-coverage/${x}-combined" "${uploadPrefixWithTimeStamp}"
done
}

gsutil -m -q cp -r -a public-read -z js,css,html 'target/kibana-coverage/jest-combined' ${uploadPrefixWithTimeStamp}
uploadBase
uploadRest
23 changes: 13 additions & 10 deletions .buildkite/scripts/steps/code_coverage/util.sh
@@ -2,15 +2,27 @@

set -euo pipefail

header() {
local fileName=$1

echo "" >"$fileName"

echo "### File Name:" >>"$fileName"
printf " %s\n\n" "$fileName" >>"$fileName"
}

# $1 file name, ex: "target/dir-listing-jest.txt"
# $2 directory to be listed, ex: target/kibana-coverage/jest
dirListing() {
local fileName=$1
local dir=$2

ls -l "$dir" >"$fileName"
header "$fileName"

ls -l "$dir" >>"$fileName"

printf "\n### %s \n\tlisted to: %s\n" "$dir" "$fileName"

buildkite-agent artifact upload "$fileName"

printf "\n### %s Uploaded\n" "$fileName"
@@ -29,15 +41,6 @@ replacePaths() {
done
}

header() {
local fileName=$1

echo "" >"$fileName"

echo "### File Name:" >>"$fileName"
printf "\t%s\n" "$fileName" >>"$fileName"
}

fileHeads() {
local fileName=$1
local dir=$2