-
Notifications
You must be signed in to change notification settings - Fork 26
296 lines (245 loc) · 11.3 KB
/
pr-slack-notification.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
---
# Daily PR digest workflow: collects open/new/updated pull requests and PRs
# closed since the previous run, then posts them to Slack. State between runs
# (the set of already-notified PRs and the last-run timestamp) is persisted in
# the GitHub Actions cache under the `cache/` directory.
name: Fetch All Open and Recently Closed Pull Requests using cron job

on:
  workflow_dispatch:
  schedule:
    # 15:30 UTC on every day-of-week from Monday through Friday
    # (21:00 IST; 10:30 AM EST / 11:30 AM EDT).
    - cron: '30 15 * * 1-5'

jobs:
  fetch_all_pull_requests_and_notify_using_condition_and_cron:
    runs-on: ubuntu-latest
    env:
      REPO: ${{ github.repository }}
      BRANCH: main
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      API_URL: https://api.github.com/repos/redhat-developer/lsp4ij/pulls
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
      WORKFLOW_BUILDER_WEBHOOK: ${{ secrets.WORKFLOW_BUILDER_WEBHOOK }}
      NO_PR_WORKFLOW_BUILDER_WEBHOOK: ${{ secrets.NO_PR_WORKFLOW_BUILDER_WEBHOOK }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Discover the most recent cache key so the restore step below can pick
      # up the state saved by the previous run. Falls back to a fresh,
      # run-number-based key when no cache exists yet.
      - name: Fetch recent cache
        run: |
          gh extension install actions/gh-actions-cache
          # Fetching list of cache keys...
          # Allowed values are within the limit 1-100. If we give 100, only the latest 100 cache keys will be listed. Any older cache keys beyond this limit will not be included in '$cacheKeys'
          cacheKeys=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
          # Extract the most recent cache key (list is newest-first)
          mostRecentCacheKey=$(echo "$cacheKeys" | head -n 1)
          if [ -n "$mostRecentCacheKey" ]; then
            echo "CACHE_KEY=$mostRecentCacheKey" >> $GITHUB_ENV
          else
            echo "No recent cache key found, generating a new one......"
            echo "CACHE_KEY=${{ runner.os }}-pr-cache-${{ github.run_number }}" >> $GITHUB_ENV
          fi
          echo "Done"

      - name: Restore cache
        id: cache-restore
        uses: actions/cache@v4
        with:
          path: cache
          key: ${{ env.CACHE_KEY }}

      - name: Ensure cache directory exists and check if cache restored
        run: |
          mkdir -p cache
          if [ -f cache/notified_prs.json ]; then
            echo "Cache restored successfully."
            cat cache/notified_prs.json
          else
            echo "Cache not restored or file does not exist.."
          fi

      # Compare the currently-open PRs against the cached snapshot
      # (cache/notified_prs.json) and build the Slack message body for any PR
      # that is new or whose updated_at changed since the last run.
      # Exports notify=true/false via GITHUB_ENV for the Slack step.
      - name: Fetch all opened pull request details using condition
        id: fetch_all_pull_requests_using_condition
        run: |
          # The number of results per page is limited to a maximum of 100. We need "pagination", if we require more than this limit, but for open PRs, it will be within the limit 100.
          pr_infos=$(curl -s -H "Authorization: token ${{ env.GH_TOKEN }}" \
            "${{ env.API_URL }}?state=open&direction=desc&per_page=100")
          echo "List of Currently Opened PRs: "
          echo "$pr_infos" | jq '.[] | {number, updated_at, draft}'
          # Load previous PR data if exists
          if [ -f cache/notified_prs.json ]; then
            previous_prs=$(cat cache/notified_prs.json)
          else
            previous_prs="[]"
          fi
          pr_list=""
          new_notified_prs="[]"
          notify=false
          # base64-encode each array element so whitespace in PR titles
          # survives the for-loop word splitting.
          for pr_info in $(echo "$pr_infos" | jq -r '.[] | @base64'); do
            _jq() {
              echo "$pr_info" | base64 --decode | jq -r "${1}"
            }
            pr_number=$(_jq '.number')
            pr_title=$(_jq '.title')
            pr_user=$(_jq '.user.login')
            pr_url=$(_jq '.html_url')
            pr_draft=$(_jq '.draft')
            pr_created_at=$(_jq '.created_at')
            pr_updated_at=$(_jq '.updated_at')
            # Record every open PR (as strings, via --arg) for the next run's snapshot.
            pr_data=$(jq -n --arg number "$pr_number" --arg updated_at "$pr_updated_at" '{number: $number, updated_at: $updated_at}')
            new_notified_prs=$(echo "$new_notified_prs" | jq --argjson pr_data "$pr_data" '. += [$pr_data]')
            # Check if the PR is new or updated
            previous_pr=$(echo "$previous_prs" | jq --arg number "$pr_number" '.[] | select(.number == $number)')
            if [ -z "$previous_pr" ] || [ "$(echo "$previous_pr" | jq -r '.updated_at')" != "$pr_updated_at" ]; then
              draft_status=""
              # Checking the PR draft status to only send 'Draft: true' in the Slack message and avoid sending 'Draft: false'.
              if [ "$pr_draft" = "true" ]; then
                draft_status="\n*Draft*: true"
              fi
              pr_list="${pr_list}\n*Pull Request* #${pr_number}: ${pr_title}\n*Created by*: ${pr_user}\n*URL*: ${pr_url}${draft_status}\n*Created At*: ${pr_created_at}\n*Last Updated At*: ${pr_updated_at}\n"
              notify=true
            fi
          done
          echo "List of PRs that need to be sent to Slack now:"
          echo "$pr_list"
          # Save current PR data for future comparison
          echo "$new_notified_prs" > cache/notified_prs.json
          if [ "$notify" = true ]; then
            echo -e "$pr_list" > pr_list.txt
            echo "notify=true" >> $GITHUB_ENV
          else
            echo "notify=false" >> $GITHUB_ENV
          fi

      # Build the closed-PR section: everything closed after the cached
      # last-run timestamp (or the last 24h on a first run).
      # Exports notify_closed=true/false via GITHUB_ENV.
      - name: Fetch closed pull requests since the last run
        id: fetch_closed_prs
        run: |
          if [ -f cache/last_run_timestamp.txt ]; then
            last_run_timestamp=$(cat cache/last_run_timestamp.txt)
          else
            last_run_timestamp=$(date -u -d "-24 hours" +%Y-%m-%dT%H:%M:%SZ)
          fi
          echo "Last run timestamp: $last_run_timestamp"
          # We used 'sort=updated' to fetch list of closed PRs with max limit of 100(Default value). In the future, we expect to have more than 100 closed PRs, so it's better to keep 'sort=updated'.
          closed_prs=$(curl -s -H "Authorization: token ${{ env.GH_TOKEN }}" \
            "${{ env.API_URL }}?state=closed&sort=updated&direction=desc&per_page=100")
          closed_pr_list=$(echo "$closed_prs" | jq -r \
            --arg last_run "$last_run_timestamp" \
            '.[] | select(.closed_at > $last_run) | "*Closed Pull Request* #\(.number): \(.title)\n*Closed by*: \(.user.login)\n*URL*: \(.html_url)\n*Closed At*: \(.closed_at)\n"')
          echo "Closed PR List since last cron job:"
          echo "$closed_pr_list"
          if [ -n "$closed_pr_list" ]; then
            echo -e "$closed_pr_list" > closed_pr_list.txt
            echo "notify_closed=true" >> $GITHUB_ENV
          else
            echo "notify_closed=false" >> $GITHUB_ENV
          fi

      # FIX: the previous condition interpolated '${{ env.notify }}' into the
      # expression, which expands to the bare token `true` before evaluation;
      # `true == 'true'` is compared via numeric coercion ('true' -> NaN) and
      # is always false, so the notification never fired. Referencing the env
      # context directly inside the expression evaluates as intended.
      - name: Send Slack notification for PRs
        if: success() && (env.notify == 'true' || env.notify_closed == 'true')
        run: |
          # Initialize PR lists
          pr_list=""
          closed_pr_list=""
          # Check if the open PRs file exists and read its content
          if [ -f pr_list.txt ]; then
            pr_list=$(cat pr_list.txt)
          fi
          # Check if the closed PRs file exists and read its content
          if [ -f closed_pr_list.txt ]; then
            closed_pr_list=$(cat closed_pr_list.txt)
          fi
          # Initialize payload blocks
          payload_blocks=()
          # Add open PRs section if not empty
          if [ -n "$pr_list" ]; then
            payload_blocks+=("{
              \"type\": \"header\",
              \"text\": {
                \"type\": \"plain_text\",
                \"text\": \"List of Open/New/Updated Pull Requests using Cron Job\"
              }
            }")
            payload_blocks+=("{
              \"type\": \"section\",
              \"text\": {
                \"type\": \"mrkdwn\",
                \"text\": $(echo "$pr_list" | jq -sR .)
              }
            }")
          fi
          # Add closed PRs section if not empty
          if [ -n "$closed_pr_list" ]; then
            payload_blocks+=("{
              \"type\": \"header\",
              \"text\": {
                \"type\": \"plain_text\",
                \"text\": \"List of Pull Requests Closed Since the Last Cron Job\"
              }
            }")
            payload_blocks+=("{
              \"type\": \"section\",
              \"text\": {
                \"type\": \"mrkdwn\",
                \"text\": $(echo "$closed_pr_list" | jq -sR .)
              }
            }")
          fi
          # Construct the payload
          payload=$(jq -n --argjson blocks "$(printf '%s\n' "${payload_blocks[@]}" | jq -s '.')" '
          {
            "blocks": $blocks
          }')
          # Send the payload to Slack
          curl -X POST -H 'Content-type: application/json' --data "$payload" $SLACK_WEBHOOK_URL || echo "Slack notification failed with status code: $?"

      # Persist this run's timestamp so the next run only reports PRs closed
      # after it.
      - name: Save current timestamp
        run: |
          current_timestamp=$(date -u +%Y-%m-%dT%H:%M:%SZ)
          echo "$current_timestamp" > cache/last_run_timestamp.txt
          echo "Current run timestamp saved: $current_timestamp"

      # FIX: this step runs with `if: always()`, so the snapshot file may not
      # exist if an earlier step failed before writing it; a bare `cat` would
      # then fail this step too. Tolerate missing paths instead.
      - name: Verify Cache Save
        if: always()
        run: |
          echo "Checking saved cache content...."
          ls -l cache/ || echo "cache directory does not exist"
          cat cache/notified_prs.json || echo "notified_prs.json does not exist"

      # actions/cache saves `cache/` under this run-scoped key in its
      # post-job hook.
      - name: Save cache
        if: always()
        uses: actions/cache@v4
        with:
          path: cache
          key: ${{ runner.os }}-pr-cache-${{ github.run_number }}

      # Delete the cache key that was restored at the start of the job so only
      # the newest snapshot accumulates.
      - name: Cleanup the restored cache key
        run: |
          # Fetching list of cache keys........
          # Allowed values are within the limit 1-100
          cacheKeys=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
          echo "Cache keys-->: $cacheKeys"
          # Extract the most recent cache key
          mostRecentCacheKey=$(echo "$cacheKeys" | head -n 1)
          # Setting this to not fail the workflow while deleting cache key
          set +e
          if [ -n "$mostRecentCacheKey" ] && [[ $mostRecentCacheKey == Linux-pr-cache-* ]]; then
            echo "Deleting the most recent cache key..."
            gh actions-cache delete $mostRecentCacheKey -R $REPO -B $BRANCH --confirm
          else
            echo "No cache keys found."
          fi
          echo "Done"

      # Ping a Slack Workflow Builder webhook: one webhook when there was
      # something to report, a different one when there was not.
      - name: Slack Notification for Response message
        if: success()
        run: |
          payload=$(jq -n '
          {
            "blocks": [
              {
                "type": "header",
                "text": {
                  "type": "plain_text",
                  "text": "Slack Notification to perform Workflow Builder Action"
                }
              }
            ]
          }')
          if [[ "${{ env.notify }}" == "false" && "${{ env.notify_closed }}" == "false" ]]; then
            webhook_url=$NO_PR_WORKFLOW_BUILDER_WEBHOOK
          else
            webhook_url=$WORKFLOW_BUILDER_WEBHOOK
          fi
          if [ -n "$webhook_url" ]; then
            curl -X POST -H 'Content-type: application/json' --data "$payload" "$webhook_url" || echo "Slack notification failed with status code: $?"
          else
            echo "Webhook URL not found. Slack notification not sent."
          fi

# 'secrets.GITHUB_TOKEN' is automatically provided by GitHub Actions for each workflow run. We don't need to manually create or manage this token.
# Below, the workflow is granting "write" access to the actions scope of the GITHUB_TOKEN. This enables the workflow to be able to delete cache keys.
# If the "write" permission does not enable the ability to delete the cache keys, then the repository owner will need to create a Personal Access Token (PAT) with write permissions, add it as a secret, and then specify the name in GH_TOKEN.
permissions:
  actions: write