Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

GitHub Actions Code Formatter workflow #7966

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
141 changes: 123 additions & 18 deletions .github/workflows/format-code.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ on:
- cron: 45 4 * * 1

permissions:
contents: read
contents: write
pull-requests: write

concurrency:
group: '${{ github.ref }}-${{ github.workflow }}'
Expand All @@ -16,9 +17,6 @@ concurrency:
jobs:
build:
name: MegaLinter
permissions:
contents: write
pull-requests: write
runs-on: ubuntu-latest
steps:
- name: Checkout Code
Expand All @@ -32,9 +30,13 @@ jobs:

- name: Create new branch
run: |
date=$(date +%Y_%m_%d)
branch_name="date_$date"
date=$(date +%Y_%m_%d_%H_%M)
branch_name="code_formatter_$date"
git checkout -b $branch_name
echo "branch_name=$branch_name" >> $GITHUB_ENV
# Push the newly created branch to the remote
git push -u origin $branch_name


- name: Run linter
id: ml
Expand All @@ -54,35 +56,138 @@ jobs:
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
VALIDATE_ALL_CODEBASE: true
YAML_PRETTIER_FILTER_REGEX_EXCLUDE: (.github/*)
MARKDOWN_MARKDOWNLINT_FILTER_REGEX_EXCLUDE: (terraform/modules/.*/.*.md)
REPORT_OUTPUT_FOLDER: none

- name: Check for changes
run: |
# Show the status and diff before staging and committing the changes
echo "===== Git Status & Diff ====="
git status
git diff

echo "===== Git Add ====="
git add .
git commit -m "Updates from GitHub Actions Format Code workflow"
branch_name=$(git branch --show-current)
changes=$(git diff origin/main...$branch_name --name-only)
changes=$(git diff --staged --name-only)

if [ -z "$changes" ]; then
echo "No changes detected."
exit 1
echo "Exiting workflow with status 0 without reporting an error"
exit 0
else
echo "Changes detected."
exit 0
echo "changes=true" >> $GITHUB_ENV
git diff --staged --name-only > changed_files.txt
echo "List Files"
cat changed_files.txt
fi

- name: Push changes
- name: Prepare the Changes for GraphQL
if: env.changes == 'true'
run: |
git config --global push.autoSetupRemote true
git push
commit_oid=$(git rev-parse HEAD)
echo "commit_oid=$commit_oid" >> $GITHUB_ENV

# Initialize an empty JSON object for the additions
files_for_commit='{"additions": []}'

# Read the changed files from changed_files.txt
while IFS= read -r file; do
if [[ -f "$file" ]]; then
# Read the file content (note: command substitution strips any trailing newline)
file_content="$(cat "$file")"

# Base64 encode the contents of the file
base64_content=$(base64 -w 0 <<< "$file_content")

# Construct a JSON object for this file and append it to the additions array
files_for_commit=$(echo "$files_for_commit" | jq --arg path "$file" --arg content "$base64_content" \
'.additions += [{ "path": $path, "contents": $content }]')
fi
done < changed_files.txt

# Output the final JSON array
echo "$files_for_commit" > files_for_commit.json
cat files_for_commit.json

# Error handling for `jq` output
if ! jq . files_for_commit.json; then
echo "Error processing files_for_commit.json"
exit 1
fi

- name: Commit changes via GraphQL
if: env.changes == 'true'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
commit_message="Automated code formatting fixes"
files_for_commit="$(cat files_for_commit.json)"

# Error handling for `jq` output
if ! jq . files_for_commit.json; then
echo "Error reading files_for_commit.json"
exit 1
fi

# Output the final JSON array
echo "$files_for_commit" > files_for_commit.json
cat files_for_commit.json # Check the contents for validity

# Validate the JSON before proceeding
if ! jq empty files_for_commit.json; then
echo "Invalid JSON in files_for_commit.json"
exit 1
fi

# Prepare the mutation payload
mutation_payload=$(jq -n \
--arg branch_name "$branch_name" \
--arg commit_oid "$commit_oid" \
--arg repo_id "$repo_id" \
--arg commit_message "$commit_message" \
--argjson fileChanges "$(jq -c . < files_for_commit.json)" \
'{
query: "mutation($input: CreateCommitOnBranchInput!) { createCommitOnBranch(input: $input) { commit { oid } } }",
variables: {
input: {
branch: {
repositoryNameWithOwner: "ministryofjustice/modernisation-platform-environments",
branchName: $branch_name
},
message: {
headline: $commit_message
},
fileChanges: $fileChanges,
expectedHeadOid: $commit_oid
}
}
}')

echo "Mutation Payload: $mutation_payload"

# Send the mutation request to GitHub's GraphQL API and capture the response
RESPONSE=$(curl -X POST -H "Authorization: bearer $GITHUB_TOKEN" \
-H "Content-Type: application/json" \
-d "$mutation_payload" https://api.github.com/graphql)

# Parse the commit OID from the response
COMMIT_OID=$(echo "$RESPONSE" | jq -r ".data.createCommitOnBranch.commit.oid")

# Check if the commit was successfully created
if [ "$COMMIT_OID" != "null" ]; then
echo "Commit successfully created with OID: $COMMIT_OID"
else
echo "Error creating commit: $RESPONSE"
fi

- name: Create pull request
if: env.changes == 'true'
env:
GH_TOKEN: ${{ github.token }}
run: |
pr_title="GitHub Actions Code Formatter workflow"
pr_body="This pull request includes updates from the GitHub Actions Code Formatter workflow. Please review the changes and merge if everything looks good."
branch_name=$(git branch --show-current)
pr_head="${{ github.repository_owner }}:${branch_name}"
pr_base="main"
gh pr create --title "$pr_title" --body "$pr_body" --head "$pr_head" --base "$pr_base" --label "code quality"

gh pr create --title "$pr_title" --body "$pr_body" --head "$pr_head" --base "$pr_base" --label "code quality"
6 changes: 3 additions & 3 deletions terraform/environments/delius-core/locals_preproduction.tf
Original file line number Diff line number Diff line change
Expand Up @@ -132,10 +132,10 @@ locals {
read_database = "PRENDAS1"
}
audit_target_endpoint = {
write_environment = "preprod" # Until production exists set dummy replication target
write_database = "NONE" # Remove this dummy attribute once production target exists
write_environment = "preprod" # Until production exists set dummy replication target
write_database = "NONE" # Remove this dummy attribute once production target exists
}
user_source_endpoint = { # Set this map to {} once production exists
user_source_endpoint = { # Set this map to {} once production exists
read_host = "primarydb"
read_database = "NONE"
}
Expand Down
6 changes: 3 additions & 3 deletions terraform/environments/delius-core/locals_stage.tf
Original file line number Diff line number Diff line change
Expand Up @@ -131,10 +131,10 @@ locals {
read_database = "STGNDA"
}
audit_target_endpoint = {
write_environment = "stage" # Until production exists set dummy replication target
write_database = "NONE" # Remove this dummy attribute once production target exists
write_environment = "stage" # Until production exists set dummy replication target
write_database = "NONE" # Remove this dummy attribute once production target exists
}
user_source_endpoint = { # Set this map to {} once production exists
user_source_endpoint = { # Set this map to {} once production exists
read_host = "primarydb"
read_database = "NONE"
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,49 +12,49 @@ locals {
try(var.dms_config.user_target_endpoint.write_database, null) == null ? {} : {
user_inbound_replication = {
replication_task_arn = aws_dms_replication_task.user_inbound_replication[0].replication_task_arn,
replication_task_id = aws_dms_replication_task.user_inbound_replication[0].replication_task_id
replication_task_id = aws_dms_replication_task.user_inbound_replication[0].replication_task_id
}
},
{ for k in keys(local.client_account_map) :
"business_interaction_inbound_replication_from_${k}" => {
"business_interaction_inbound_replication_from_${k}" => {
replication_task_arn = aws_dms_replication_task.business_interaction_inbound_replication[k].replication_task_arn
replication_task_id = aws_dms_replication_task.business_interaction_inbound_replication[k].replication_task_id
}
replication_task_id = aws_dms_replication_task.business_interaction_inbound_replication[k].replication_task_id
}
},
{ for k in keys(local.client_account_map) :
"audited_interaction_inbound_replication_from_${k}" => {
"audited_interaction_inbound_replication_from_${k}" => {
replication_task_arn = aws_dms_replication_task.audited_interaction_inbound_replication[k].replication_task_arn
replication_task_id = aws_dms_replication_task.audited_interaction_inbound_replication[k].replication_task_id
}
replication_task_id = aws_dms_replication_task.audited_interaction_inbound_replication[k].replication_task_id
}
},
{ for k in keys(local.client_account_map) :
"audited_interaction_checksum_inbound_replication_from_${k}" => {
"audited_interaction_checksum_inbound_replication_from_${k}" => {
replication_task_arn = aws_dms_replication_task.audited_interaction_checksum_inbound_replication[k].replication_task_arn
replication_task_id = aws_dms_replication_task.audited_interaction_checksum_inbound_replication[k].replication_task_id
}
replication_task_id = aws_dms_replication_task.audited_interaction_checksum_inbound_replication[k].replication_task_id
}
},
try(var.dms_config.audit_source_endpoint.read_database, null) == null ? {} : {
audited_interaction_outbound_replication = {
replication_task_arn = aws_dms_replication_task.audited_interaction_outbound_replication[0].replication_task_arn
replication_task_id = aws_dms_replication_task.audited_interaction_outbound_replication[0].replication_task_id
replication_task_id = aws_dms_replication_task.audited_interaction_outbound_replication[0].replication_task_id
}
},
{ for k in keys(local.client_account_map) :
"user_outbound_replication_to_${k}" => {
replication_task_arn = aws_dms_replication_task.user_outbound_replication[k].replication_task_arn
replication_task_id = aws_dms_replication_task.user_outbound_replication[k].replication_task_id
replication_task_id = aws_dms_replication_task.user_outbound_replication[k].replication_task_id
}
},
try(var.dms_config.audit_source_endpoint.read_database, null) == null ? {} : {
business_interaction_outbound_replication = {
replication_task_arn = aws_dms_replication_task.business_interaction_outbound_replication[0].replication_task_arn
replication_task_id = aws_dms_replication_task.business_interaction_outbound_replication[0].replication_task_id
replication_task_id = aws_dms_replication_task.business_interaction_outbound_replication[0].replication_task_id
}
},
try(var.dms_config.audit_source_endpoint.read_database, null) == null ? {} : {
audited_interaction_checksum_outbound_replication = {
replication_task_arn = aws_dms_replication_task.audited_interaction_checksum_outbound_replication[0].replication_task_arn
replication_task_id = aws_dms_replication_task.audited_interaction_checksum_outbound_replication[0].replication_task_id
replication_task_id = aws_dms_replication_task.audited_interaction_checksum_outbound_replication[0].replication_task_id
}
}
)
Expand Down