Merge pull request ClickHouse#64299 from ClickHouse/ci_cancel_sync_wf_on_new_push

CI: Cancel sync wf on new push
maxknv authored May 24, 2024
2 parents dddcfaa + bd15e13 commit 08d895e
Showing 4 changed files with 68 additions and 14 deletions.
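In short: the RunConfig job in pull_request.yml gains a step that calls ci.py with --cancel-previous-run on every new push to a PR, and _cancel_pr_wf in ci.py gains a cancel_sync mode that cancels the still-running counterpart workflow in ClickHouse/clickhouse-private, using the sync_pr_run_id stored alongside the PR's CI metadata on S3. The GitHub.cancel_wf helper itself is not part of this commit; purely as an illustration of the underlying operation, cancelling a workflow run through the GitHub REST API looks roughly like the sketch below (the function name and the use of the requests library are assumptions, not code from this repository).

import requests


def cancel_workflow_run(repo: str, run_id: int, token: str) -> None:
    # POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel asks GitHub Actions
    # to stop the given run; 202 Accepted means the cancellation request was queued.
    url = f"https://api.github.com/repos/{repo}/actions/runs/{run_id}/cancel"
    response = requests.post(
        url,
        headers={
            "Authorization": f"Bearer {token}",
            "Accept": "application/vnd.github+json",
        },
        timeout=30,
    )
    response.raise_for_status()


# e.g. cancel_workflow_run("ClickHouse/clickhouse-private", sync_pr_run_id, robot_token)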
3 changes: 3 additions & 0 deletions .github/workflows/pull_request.yml
@@ -33,6 +33,9 @@ jobs:
           clear-repository: true # to ensure correct digests
           fetch-depth: 0 # to get version
           filter: tree:0
+      - name: Cancel Sync PR workflow
+        run: |
+          python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --cancel-previous-run
       - name: Labels check
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
37 changes: 26 additions & 11 deletions tests/ci/ci.py
@@ -1908,13 +1908,26 @@ def _get_ext_check_name(check_name: str) -> str:
     return check_name_with_group
 
 
-def _cancel_pr_wf(s3: S3Helper, pr_number: int) -> None:
-    run_id = CiMetadata(s3, pr_number).fetch_meta().run_id
-    if not run_id:
-        print(f"ERROR: FIX IT: Run id has not been found PR [{pr_number}]!")
+def _cancel_pr_wf(s3: S3Helper, pr_number: int, cancel_sync: bool = False) -> None:
+    wf_data = CiMetadata(s3, pr_number).fetch_meta()
+    if not cancel_sync:
+        if not wf_data.run_id:
+            print(f"ERROR: FIX IT: Run id has not been found PR [{pr_number}]!")
+        else:
+            print(
+                f"Canceling PR workflow run_id: [{wf_data.run_id}], pr: [{pr_number}]"
+            )
+            GitHub.cancel_wf(GITHUB_REPOSITORY, wf_data.run_id, get_best_robot_token())
     else:
-        print(f"Canceling PR workflow run_id: [{run_id}], pr: [{pr_number}]")
-        GitHub.cancel_wf(GITHUB_REPOSITORY, get_best_robot_token(), run_id)
+        if not wf_data.sync_pr_run_id:
+            print("WARNING: Sync PR run id has not been found")
+        else:
+            print(f"Canceling sync PR workflow run_id: [{wf_data.sync_pr_run_id}]")
+            GitHub.cancel_wf(
+                "ClickHouse/clickhouse-private",
+                wf_data.sync_pr_run_id,
+                get_best_robot_token(),
+            )
 
 
 def main() -> int:
@@ -1947,7 +1960,7 @@ def main() -> int:
     if args.configure:
         if CI and pr_info.is_pr:
             # store meta on s3 (now we need it only for PRs)
-            meta = CiMetadata(s3, pr_info.number)
+            meta = CiMetadata(s3, pr_info.number, pr_info.head_ref)
             meta.run_id = int(GITHUB_RUN_ID)
             meta.push_meta()
 
@@ -2245,10 +2258,12 @@ def main() -> int:
 
     ### CANCEL PREVIOUS WORKFLOW RUN
     elif args.cancel_previous_run:
-        assert (
-            pr_info.is_merge_queue
-        ), "Currently it's supposed to be used in MQ wf to cancel running PR wf if any"
-        _cancel_pr_wf(s3, pr_info.merged_pr)
+        if pr_info.is_merge_queue:
+            _cancel_pr_wf(s3, pr_info.merged_pr)
+        elif pr_info.is_pr:
+            _cancel_pr_wf(s3, pr_info.number, cancel_sync=True)
+        else:
+            assert False, "BUG! Not supported scenario"
 
     ### print results
     _print_results(result, args.outfile, args.pretty)
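A small detail worth noting in the hunks above: the removed call passes arguments as GitHub.cancel_wf(GITHUB_REPOSITORY, get_best_robot_token(), run_id), while both new calls pass the run id second and the token third. That implies a matching signature change in tests/ci/github_helper.py, which is not among the four files shown in this commit. The stub below only mirrors the parameter order used by the new call sites and is an assumption, not the real helper.

class GitHub:
    """Assumed shape of the helper used above; the real class lives in tests/ci/github_helper.py."""

    @staticmethod
    def cancel_wf(repo: str, run_id: int, token: str) -> None:
        # Behaviour inferred from the call sites: authenticate with `token` and
        # request cancellation of workflow run `run_id` in repository `repo`.
        raise NotImplementedError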
41 changes: 38 additions & 3 deletions tests/ci/ci_metadata.py
@@ -4,9 +4,13 @@
 from env_helper import (
     S3_BUILDS_BUCKET,
     TEMP_PATH,
+    GITHUB_UPSTREAM_REPOSITORY,
+    GITHUB_REPOSITORY,
+    S3_BUILDS_BUCKET_PUBLIC,
 )
 from s3_helper import S3Helper
 from ci_utils import GHActions
+from synchronizer_utils import SYNC_BRANCH_PREFIX
 
 
 # pylint: disable=too-many-lines
@@ -22,13 +26,14 @@ class CiMetadata:
     _LOCAL_PATH = Path(TEMP_PATH) / "ci_meta"
     _FILE_SUFFIX = ".cimd"
     _FILENAME_RUN_ID = "run_id" + _FILE_SUFFIX
+    _FILENAME_SYNC_PR_RUN_ID = "sync_pr_run_id" + _FILE_SUFFIX
 
     def __init__(
         self,
         s3: S3Helper,
         pr_number: Optional[int] = None,
-        sha: Optional[str] = None,
         git_ref: Optional[str] = None,
+        sha: Optional[str] = None,
     ):
         assert pr_number or (sha and git_ref)
 
@@ -37,12 +42,25 @@ def __init__(
         self.git_ref = git_ref
         self.s3 = s3
         self.run_id = 0
+        self.upstream_pr_number = 0
+        self.sync_pr_run_id = 0
 
         if self.pr_number:
             self.s3_path = f"{self._S3_PREFIX}/PRs/{self.pr_number}/"
         else:
             self.s3_path = f"{self._S3_PREFIX}/{self.git_ref}/{self.sha}/"
 
+        # Process upstream StatusNames.SYNC:
+        # metadata path for upstream pr
+        self.s3_path_upstream = ""
+        if (
+            self.git_ref
+            and self.git_ref.startswith(f"{SYNC_BRANCH_PREFIX}/pr/")
+            and GITHUB_REPOSITORY != GITHUB_UPSTREAM_REPOSITORY
+        ):
+            self.upstream_pr_number = int(self.git_ref.split("/pr/", maxsplit=1)[1])
+            self.s3_path_upstream = f"{self._S3_PREFIX}/PRs/{self.upstream_pr_number}/"
+
         self._updated = False
 
         if not self._LOCAL_PATH.exists():
@@ -73,6 +91,8 @@ def fetch_meta(self):
                 assert len(lines) == 1
                 if file_name.name == self._FILENAME_RUN_ID:
                     self.run_id = int(lines[0])
+                elif file_name.name == self._FILENAME_SYNC_PR_RUN_ID:
+                    self.sync_pr_run_id = int(lines[0])
 
         self._updated = True
         return self
@@ -84,8 +104,15 @@ def push_meta(
         Uploads meta on s3
         """
         assert self.run_id
+        assert self.git_ref, "Push meta only with full info"
+
+        if not self.upstream_pr_number:
+            log_title = f"Storing workflow metadata: PR [{self.pr_number}]"
+        else:
+            log_title = f"Storing workflow metadata: PR [{self.pr_number}], upstream PR [{self.upstream_pr_number}]"
 
         GHActions.print_in_group(
-            f"Storing workflow metadata: PR [{self.pr_number}]",
+            log_title,
             [f"run_id: {self.run_id}"],
         )
 
@@ -96,9 +123,17 @@
         _ = self.s3.upload_file(
             bucket=S3_BUILDS_BUCKET,
             file_path=local_file,
-            s3_path=self.s3_path + local_file.name,
+            s3_path=self.s3_path + self._FILENAME_RUN_ID,
         )
 
+        if self.upstream_pr_number:
+            # store run id in upstream pr meta as well
+            _ = self.s3.upload_file(
+                bucket=S3_BUILDS_BUCKET_PUBLIC,
+                file_path=local_file,
+                s3_path=self.s3_path_upstream + self._FILENAME_SYNC_PR_RUN_ID,
+            )
+
 
 if __name__ == "__main__":
     # TEST:
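Taken together, the ci_metadata.py changes set up a small S3 handshake between the two repositories. When the configure step runs on a branch named <SYNC_BRANCH_PREFIX>/pr/<N> in a repository other than the upstream one (i.e. the private sync repo), CiMetadata derives the upstream PR number N from the git ref and, besides its own run_id file, appears to upload that run id into the upstream PR's metadata prefix in the public builds bucket as sync_pr_run_id.cimd; the upstream PR workflow later reads it back via fetch_meta() and can cancel the sync run. The sketch below only illustrates the resulting object keys; "<prefix>" stands in for CiMetadata._S3_PREFIX, which is defined earlier in ci_metadata.py and not visible in this diff.

def pr_meta_keys(prefix: str, pr_number: int) -> dict:
    # Keys implied by the path handling above: both files live under the PR's prefix.
    base = f"{prefix}/PRs/{pr_number}/"
    return {
        # written by the PR's own workflow via push_meta()
        "run_id": base + "run_id.cimd",
        # written into the upstream PR's prefix by the private-repo sync workflow
        "sync_pr_run_id": base + "sync_pr_run_id.cimd",
    }


# fetch_meta() downloads every "*.cimd" object under this prefix and fills in
# run_id / sync_pr_run_id based on the file name.
print(pr_meta_keys("<prefix>", 64299))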
1 change: 1 addition & 0 deletions tests/ci/env_helper.py
@@ -31,6 +31,7 @@
 REPO_COPY = os.getenv("REPO_COPY", GITHUB_WORKSPACE)
 RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp")))
 S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "clickhouse-builds")
+S3_BUILDS_BUCKET_PUBLIC = "clickhouse-builds"
 S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "clickhouse-test-reports")
 S3_URL = os.getenv("S3_URL", "https://s3.amazonaws.com")
 S3_DOWNLOAD = os.getenv("S3_DOWNLOAD", S3_URL)
