Merge branch 'lsp4ij-market-0.0.2-integration' into artifactVersion
anusreelakshmi934 committed Jul 9, 2024
2 parents ea146dd + 4ff320b commit 85a0f7c
Showing 2 changed files with 336 additions and 13 deletions.
61 changes: 48 additions & 13 deletions .github/workflows/cronJob.yaml
@@ -11,30 +11,65 @@ jobs:
runs-on: ubuntu-latest
outputs:
pr_details: ${{ steps.extract.outputs.pr_details }}
is_empty: ${{ steps.extract.outputs.is_empty }}
env:
API_URL: https://api.github.com/repos/redhat-developer/lsp4ij/pulls
name: PR Details
steps:
- name: Extract PR numbers and merge_commit_shas
shell: bash
id: extract
run: |
pr_infos=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
"${{ env.API_URL }}")
# Fetch PR details from the GitHub API
pr_infos=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" "${{ env.API_URL }}")
# Extract PR numbers and merge_commit_sha values, excluding draft pull requests
pr_details=$(echo "$pr_infos" | jq -r '.[] | select(.draft == false) | {number: .number, sha: .merge_commit_sha}')
# Print the PR number and corresponding merge commit sha
echo "PR number and merge commit sha:"
echo "$pr_details" | jq -r '. | "PR #\(.number): \(.sha)"'
# Create a JSON array string of PR numbers and SHAs
pr_details=$(echo "$pr_details" | jq -nc '[inputs | {number: .number, sha: .sha}]')
echo "pr_details=$pr_details" >> $GITHUB_OUTPUT
pr_numbers=$(echo "$pr_infos" | jq -r '.[] | select(.draft == false) | {number: .number, sha: .merge_commit_sha} | @base64')
# Array to store PRs that are not drafts and have no merge conflicts
declare -a valid_prs=()
for pr in $pr_numbers; do
# Decode the base64 encoded JSON string
pr=$(echo "$pr" | base64 --decode)
# Extract PR number
number=$(echo "$pr" | jq -r '.number')
url="${{ env.API_URL }}/$number"
pr_detail=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" "$url")
mergeable_state=$(jq -r '.mergeable_state' <<< "$pr_detail")
if [[ "$mergeable_state" != "dirty" ]]; then
pr_number=$(jq -r '.number' <<< "$pr_detail")
pr_sha=$(jq -r '.merge_commit_sha' <<< "$pr_detail")
pr_link=$(jq -r '.html_url' <<< "$pr_detail")
valid_prs+=("{\"number\": \"$pr_number\", \"sha\": \"$pr_sha\", \"link\": \"$pr_link\"}")
else
echo "::warning file=::PR #$(jq -r '.number' <<< "$pr_detail") has conflicts. See : $(jq -r '.html_url' <<< "$pr_detail")"
fi
done
# Create a JSON string from the array
pr_details_array=$(IFS=,; echo "[${valid_prs[*]}]")
# Print PR number and SHA values
echo "$pr_details_array" | jq '.[]'
# Set the output for further steps
echo "pr_details=$pr_details_array" >> $GITHUB_OUTPUT
# Check if pr_details_array is empty
if [ $(echo "$pr_details_array" | jq length) -eq 0 ]; then
echo "::warning file=::There are no open PRs, or all the existing PRs are either drafts or have merge conflicts. Skipping further actions."
echo "is_empty=true" >> $GITHUB_OUTPUT
else
echo "is_empty=false" >> $GITHUB_OUTPUT
fi
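# For reference, a minimal standalone sketch of the jq '@base64' iteration used in this step,
# run against a hypothetical hard-coded JSON array instead of the live GitHub API response:
sample='[{"number": 1, "sha": "abc"}, {"number": 2, "sha": "def"}]'
for row in $(echo "$sample" | jq -r '.[] | @base64'); do
  decoded=$(echo "$row" | base64 --decode)   # each element decoded back to plain JSON
  echo "PR #$(jq -r '.number' <<< "$decoded") -> $(jq -r '.sha' <<< "$decoded")"
done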
# Run the LTI tests against each open lsp4ij PR
call-build-workflow-for-each-merge-commit-sha:
needs: fetch_all_pull_request_shas
if: ${{ needs.fetch_all_pull_request_shas.outputs.is_empty == 'false' }}
uses: ./.github/workflows/build.yaml
strategy:
fail-fast: false
@@ -43,7 +78,7 @@ jobs:
with:
useLocalPlugin: true
refLsp4ij: ${{ matrix.pr_details.sha }}
name: Run LTI tests for PR #${{ matrix.pr_details.number }}
name: Running PR

# Run the LTI Tests against lsp4ij main branch
call-build-workflow-for-lsp4ij-main-branch:
288 changes: 288 additions & 0 deletions .github/workflows/pr-slack-notification.yaml
@@ -0,0 +1,288 @@
name: Fetch All Open and Recently Closed Pull Requests using cron job

on:
workflow_dispatch:
schedule:
- cron: '30 15 * * 1-5' # At 15:30 UTC, Monday through Friday (9:00 PM IST; 11:30 AM EDT or 10:30 AM EST).

jobs:
fetch_all_pull_requests_and_notify_using_condition_and_cron:
runs-on: ubuntu-latest

env:
REPO: ${{ github.repository }}
BRANCH: main
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
API_URL: https://api.github.com/repos/redhat-developer/lsp4ij/pulls
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}

steps:
- name: Checkout repository
uses: actions/checkout@v2

- name: Fetch recent cache
run: |
gh extension install actions/gh-actions-cache
# Fetching list of cache keys...
# The list limit must be between 1 and 100. With -L 100, only the latest 100 cache keys are listed; any older keys beyond that limit are not included in '$cacheKeys'.
cacheKeys=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
# Extract the most recent cache key
mostRecentCacheKey=$(echo "$cacheKeys" | head -n 1)
if [ -n "$mostRecentCacheKey" ]; then
echo "CACHE_KEY=$mostRecentCacheKey" >> $GITHUB_ENV
else
echo "No recent cache key found, generating a new one......"
echo "CACHE_KEY=${{ runner.os }}-pr-cache-${{ github.run_number }}" >> $GITHUB_ENV
fi
echo "Done"
- name: Restore cache
id: cache-restore
uses: actions/cache@v2
with:
path: cache
key: ${{ env.CACHE_KEY }}

- name: Ensure cache directory exists and check if cache restored
run: |
mkdir -p cache
if [ -f cache/notified_prs.json ]; then
echo "Cache restored successfully."
cat cache/notified_prs.json
else
echo "Cache not restored or file does not exist.."
fi
- name: Fetch all opened pull request details using condition
id: fetch_all_pull_requests_using_condition
run: |
# The API returns at most 100 results per page; pagination would be needed beyond that, but the number of open PRs stays well within the 100 limit.
pr_infos=$(curl -s -H "Authorization: token ${{ env.GH_TOKEN }}" \
"${{ env.API_URL }}?state=open&direction=desc&per_page=100")
echo "List of Currently Opened PRs: "
echo "$pr_infos" | jq '.[] | {number, updated_at, draft}'
# Load previous PR data if exists
if [ -f cache/notified_prs.json ]; then
previous_prs=$(cat cache/notified_prs.json)
else
previous_prs="[]"
fi
pr_list=""
new_notified_prs="[]"
notify=false
for pr_info in $(echo "$pr_infos" | jq -r '.[] | @base64'); do
_jq() {
echo "$pr_info" | base64 --decode | jq -r "${1}"
}
pr_number=$(_jq '.number')
pr_title=$(_jq '.title')
pr_user=$(_jq '.user.login')
pr_url=$(_jq '.html_url')
pr_draft=$(_jq '.draft')
pr_created_at=$(_jq '.created_at')
pr_updated_at=$(_jq '.updated_at')
pr_data=$(jq -n --arg number "$pr_number" --arg updated_at "$pr_updated_at" '{number: $number, updated_at: $updated_at}')
new_notified_prs=$(echo "$new_notified_prs" | jq --argjson pr_data "$pr_data" '. += [$pr_data]')
# Check if the PR is new or updated
previous_pr=$(echo "$previous_prs" | jq --arg number "$pr_number" '.[] | select(.number == $number)')
if [ -z "$previous_pr" ] || [ "$(echo "$previous_pr" | jq -r '.updated_at')" != "$pr_updated_at" ]; then
draft_status=""
# Include a 'Draft: true' line in the Slack message only when the PR is a draft; non-draft PRs get no draft line.
if [ "$pr_draft" = "true" ]; then
draft_status="\n*Draft*: true"
fi
pr_list="${pr_list}\n*Pull Request* #${pr_number}: ${pr_title}\n*Created by*: ${pr_user}\n*URL*: ${pr_url}${draft_status}\n*Created At*: ${pr_created_at}\n*Last Updated At*: ${pr_updated_at}\n"
notify=true
fi
done
echo "List of PRs that need to be sent to Slack now:"
echo "$pr_list"
# Save current PR data for future comparison
echo "$new_notified_prs" > cache/notified_prs.json
if [ "$notify" = true ]; then
echo -e "$pr_list" > pr_list.txt
echo "notify=true" >> $GITHUB_ENV
else
echo "notify=false" >> $GITHUB_ENV
fi
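# For reference, a minimal standalone sketch of the new-or-updated check above, using
# hypothetical cached and current values rather than real API data:
previous_prs='[{"number": "10", "updated_at": "2024-07-01T00:00:00Z"}]'
pr_number="10"
pr_updated_at="2024-07-02T12:00:00Z"
previous_pr=$(echo "$previous_prs" | jq --arg number "$pr_number" '.[] | select(.number == $number)')
if [ -z "$previous_pr" ] || [ "$(echo "$previous_pr" | jq -r '.updated_at')" != "$pr_updated_at" ]; then
  echo "PR #$pr_number is new or updated -> it would be included in the Slack message"
fi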
- name: Fetch closed pull requests since the last run
id: fetch_closed_prs
run: |
if [ -f cache/last_run_timestamp.txt ]; then
last_run_timestamp=$(cat cache/last_run_timestamp.txt)
else
last_run_timestamp=$(date -u -d "-24 hours" +%Y-%m-%dT%H:%M:%SZ)
fi
echo "Last run timestamp: $last_run_timestamp"
# 'sort=updated' fetches the most recently updated closed PRs first, within the 100-per-page limit (the default). Once the repository has more than 100 closed PRs, this sort keeps the recent ones on the first page.
closed_prs=$(curl -s -H "Authorization: token ${{ env.GH_TOKEN }}" \
"${{ env.API_URL }}?state=closed&sort=updated&direction=desc&per_page=100")
closed_pr_list=$(echo "$closed_prs" | jq -r \
--arg last_run "$last_run_timestamp" \
'.[] | select(.closed_at > $last_run) | "*Closed Pull Request* #\(.number): \(.title)\n*Closed by*: \(.user.login)\n*URL*: \(.html_url)\n*Closed At*: \(.closed_at)\n"')
echo "Closed PR List since last cron job:"
echo "$closed_pr_list"
if [ -n "$closed_pr_list" ]; then
echo -e "$closed_pr_list" > closed_pr_list.txt
echo "notify_closed=true" >> $GITHUB_ENV
else
echo "notify_closed=false" >> $GITHUB_ENV
fi
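# For reference, a standalone sketch of the closed-PR filter above: ISO-8601 UTC timestamps
# compare correctly as plain strings in jq, shown here with a hypothetical two-element response:
closed_prs='[{"number": 5, "title": "Newer fix", "user": {"login": "octocat"}, "html_url": "https://example.invalid/5", "closed_at": "2024-07-09T10:00:00Z"}, {"number": 4, "title": "Older fix", "user": {"login": "octocat"}, "html_url": "https://example.invalid/4", "closed_at": "2024-07-01T10:00:00Z"}]'
last_run_timestamp="2024-07-08T00:00:00Z"
echo "$closed_prs" | jq -r --arg last_run "$last_run_timestamp" \
  '.[] | select(.closed_at > $last_run) | "*Closed Pull Request* #\(.number): \(.title)"'
# Only PR #5 is printed, because its closed_at sorts after the last-run timestamp.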
- name: Send Slack notification for PRs
if: success() && (env.notify == 'true' || env.notify_closed == 'true')
run: |
# Initialize PR lists
pr_list=""
closed_pr_list=""
# Check if the open PRs file exists and read its content
if [ -f pr_list.txt ]; then
pr_list=$(cat pr_list.txt)
fi
# Check if the closed PRs file exists and read its content
if [ -f closed_pr_list.txt ]; then
closed_pr_list=$(cat closed_pr_list.txt)
fi
# Initialize payload blocks
payload_blocks=()
# Add open PRs section if not empty
if [ -n "$pr_list" ]; then
payload_blocks+=("{
\"type\": \"header\",
\"text\": {
\"type\": \"plain_text\",
\"text\": \"List of Open/New/Updated Pull Requests using Cron Job\"
}
}")
payload_blocks+=("{
\"type\": \"section\",
\"text\": {
\"type\": \"mrkdwn\",
\"text\": $(echo "$pr_list" | jq -sR .)
}
}")
fi
# Add closed PRs section if not empty
if [ -n "$closed_pr_list" ]; then
payload_blocks+=("{
\"type\": \"header\",
\"text\": {
\"type\": \"plain_text\",
\"text\": \"List of Pull Requests Closed Since the Last Cron Job\"
}
}")
payload_blocks+=("{
\"type\": \"section\",
\"text\": {
\"type\": \"mrkdwn\",
\"text\": $(echo "$closed_pr_list" | jq -sR .)
}
}")
fi
# Construct the payload
payload=$(jq -n --argjson blocks "$(printf '%s\n' "${payload_blocks[@]}" | jq -s '.')" '
{
"blocks": $blocks
}')
# Send the payload to Slack
curl -X POST -H 'Content-type: application/json' --data "$payload" $SLACK_WEBHOOK_URL || echo "Slack notification failed with status code: $?"
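# For reference, a minimal standalone sketch of the payload assembly above, with a single
# hypothetical section block and the POST to Slack replaced by a local echo:
payload_blocks=('{"type": "section", "text": {"type": "mrkdwn", "text": "Example PR list"}}')
payload=$(jq -n --argjson blocks "$(printf '%s\n' "${payload_blocks[@]}" | jq -s '.')" '{blocks: $blocks}')
echo "$payload" | jq .   # the JSON body that would be sent to the Slack webhook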
- name: Save current timestamp
run: |
current_timestamp=$(date -u +%Y-%m-%dT%H:%M:%SZ)
echo "$current_timestamp" > cache/last_run_timestamp.txt
echo "Current run timestamp saved: $current_timestamp"
- name: Verify Cache Save
if: always()
run: |
echo "Checking saved cache content...."
ls -l cache/
cat cache/notified_prs.json
- name: Save cache
if: always()
uses: actions/cache@v2
with:
path: cache
key: ${{ runner.os }}-pr-cache-${{ github.run_number }}

- name: Cleanup the restored cache key
run: |
# Fetch the list of cache keys (the -L limit must be between 1 and 100)
cacheKeys=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
echo "Cache keys-->: $cacheKeys"
# Extract the most recent cache key
mostRecentCacheKey=$(echo "$cacheKeys" | head -n 1)
# Disable exit-on-error so a failed cache-key deletion does not fail the workflow
set +e
if [ -n "$mostRecentCacheKey" ] && [[ $mostRecentCacheKey == Linux-pr-cache-* ]]; then
echo "Deleting the most recent cache key..."
gh actions-cache delete $mostRecentCacheKey -R $REPO -B $BRANCH --confirm
else
echo "No cache keys found."
fi
echo "Done"
# 'secrets.GITHUB_TOKEN' is provided automatically by GitHub Actions for each workflow run; there is no need to create or manage this token manually.
# Below, the workflow grants "write" access to the 'actions' scope of the GITHUB_TOKEN, which is what allows it to delete cache keys.
# If "write" permission is not sufficient to delete cache keys, the repository owner will need to create a Personal Access Token (PAT) with write permissions, add it as a secret, and reference that secret in GH_TOKEN.
permissions:
actions: write

slack-notification:
runs-on: ubuntu-latest
needs: fetch_all_pull_requests_and_notify_using_condition_and_cron
env:
WORKFLOW_BUILDER_WEBHOOK: ${{ secrets.WORKFLOW_BUILDER_WEBHOOK }}
if: always()
steps:
- name: 'Slack Notification Reminder'
run: |
payload=$(jq -n '
{
"blocks": [
{
"type": "header",
"text": {
"type": "plain_text",
"text": "Slack Notification to perform Workflow Builder Action"
}
}
]
}')
curl -X POST -H 'Content-type: application/json' --data "$payload" $WORKFLOW_BUILDER_WEBHOOK || echo "Slack notification failed with status code: $?"
