feat: Add AWX operator AnsibleJob custom health check #14599

Closed
wants to merge 1 commit into from
21 changes: 21 additions & 0 deletions resource_customizations/tower.ansible.com/AnsibleJob/health.lua
@@ -0,0 +1,21 @@
hs = { status="Progressing", message="No status available"}
if obj.status ~= nil then
  if obj.status.isFinished then
    if obj.status.ansibleJobResult ~= nil then
      if obj.status.ansibleJobResult.status == "successful" then
        hs.status = "Healthy"
        hs.message = "Done"
      elseif obj.status.ansibleJobResult.status == "error" then
        hs.status = "Degraded"
        hs.message = "Failed"
      end
    end
  end
  if obj.status.ansibleJobResult ~= nil then
    if obj.status.ansibleJobResult.status == "pending" then
      hs.status = "Progressing"
      hs.message = "Job is running"
    end
  end
end
return hs
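
For anyone who wants to try this check before the file ships in a release, Argo CD also reads custom health checks from the argocd-cm ConfigMap under the resource.customizations.health.<group>_<kind> key. A minimal sketch, assuming a default install in the argocd namespace, with the script above inlined unchanged:

apiVersion: v1
kind: ConfigMap
metadata:
  name: argocd-cm
  namespace: argocd   # assumed install namespace
  labels:
    app.kubernetes.io/part-of: argocd
data:
  # Registers the script above for the AnsibleJob CRD; paste health.lua verbatim.
  resource.customizations.health.tower.ansible.com_AnsibleJob: |
    hs = { status="Progressing", message="No status available"}
    if obj.status ~= nil then
      if obj.status.isFinished then
        if obj.status.ansibleJobResult ~= nil then
          if obj.status.ansibleJobResult.status == "successful" then
            hs.status = "Healthy"
            hs.message = "Done"
          elseif obj.status.ansibleJobResult.status == "error" then
            hs.status = "Degraded"
            hs.message = "Failed"
          end
        end
      end
      if obj.status.ansibleJobResult ~= nil then
        if obj.status.ansibleJobResult.status == "pending" then
          hs.status = "Progressing"
          hs.message = "Job is running"
        end
      end
    end
    return hs

Merging this PR makes the same check available out of the box via resource_customizations, without per-instance ConfigMap configuration.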
17 changes: 17 additions & 0 deletions resource_customizations/tower.ansible.com/AnsibleJob/health_test.yaml
@@ -0,0 +1,17 @@
tests:
- healthStatus:
    status: Progressing
    message: "No status available"
  inputPath: testdata/progressing_nostatus.yaml
- healthStatus:
    status: Progressing
    message: "Job is running"
  inputPath: testdata/progressing_started.yaml
- healthStatus:
    status: Healthy
    message: "Done"
  inputPath: testdata/healthy_ready.yaml
- healthStatus:
    status: "Degraded"
    message: "Failed"
  inputPath: testdata/failed.yaml
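
Each entry feeds one manifest from testdata/ through health.lua and asserts the status and message it returns; health_test.yaml files under resource_customizations are picked up by the repository's Lua health test suite. For orientation (not part of the change), the relevant parts of .status in the four fixtures referenced above boil down to:

# testdata/failed.yaml                -> Degraded    / "Failed"
status:
  isFinished: true
  ansibleJobResult:
    status: error

# testdata/healthy_ready.yaml         -> Healthy     / "Done"
status:
  isFinished: true
  ansibleJobResult:
    status: successful

# testdata/progressing_nostatus.yaml  -> Progressing / "No status available"
status: {}

# testdata/progressing_started.yaml   -> Progressing / "Job is running"
status:
  ansibleJobResult:
    status: pending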
56 changes: 56 additions & 0 deletions resource_customizations/tower.ansible.com/AnsibleJob/testdata/failed.yaml
@@ -0,0 +1,56 @@
apiVersion: tower.ansible.com/v1alpha1
kind: AnsibleJob
metadata:
  creationTimestamp: "2023-07-18T14:11:51Z"
  generation: 1
  labels:
    tower_job_id: "98"
  name: faildemo
  namespace: myawx
  resourceVersion: "25464236"
  uid: 835f35de-e437-4d32-accf-48f39c85bef2
spec:
  job_template_name: faildemo
  tower_auth_secret: awxsecret
status:
  ansibleJobResult:
    changed: true
    failed: false
    status: error
    url: https://templates-awx-awx.apps.omachace.phw9.p1.openshiftapps.com/#/jobs/playbook/98
  conditions:
  - lastTransitionTime: "2023-07-18T14:11:59Z"
    message: ""
    reason: ""
    status: "False"
    type: Failure
  - ansibleResult:
      changed: 0
      completion: 2023-07-19T06:50:29.092309
      failures: 0
      ok: 3
      skipped: 1
    lastTransitionTime: "2023-07-18T14:11:51Z"
    message: Awaiting next reconciliation
    reason: Successful
    status: "True"
    type: Running
  - lastTransitionTime: "2023-07-19T06:50:29Z"
    message: Last reconciliation succeeded
    reason: Successful
    status: "True"
    type: Successful
  isFinished: true
  k8sJob:
    created: true
    env:
      secretNamespacedName: myawx/awxsecret
      templateName: faildemo
      verifySSL: false
    message: |-
      Monitor the job.batch status for more details with the following commands:
      'kubectl -n myawx get job.batch/faildemo'
      'kubectl -n myawx describe job.batch/faildemo'
      'kubectl -n myawx logs -f job.batch/faildemo'
    namespacedName: myawx/faildemo
  message: This job instance is already running or has reached its end state.
60 changes: 60 additions & 0 deletions resource_customizations/tower.ansible.com/AnsibleJob/testdata/healthy_ready.yaml
@@ -0,0 +1,60 @@
apiVersion: tower.ansible.com/v1alpha1
kind: AnsibleJob
metadata:
  creationTimestamp: "2023-07-18T14:17:54Z"
  generation: 1
  labels:
    tower_job_id: "99"
  name: successdemo
  namespace: myawx
  resourceVersion: "25464240"
  uid: 18c034a8-b610-430f-87f7-5ce8b2d2e4a7
spec:
  job_template_name: successdemo
  tower_auth_secret: awxsecret
status:
  ansibleJobResult:
    changed: true
    elapsed: "5.939"
    failed: false
    finished: "2023-07-18T14:18:11.561015Z"
    started: "2023-07-18T14:18:05.622110Z"
    status: successful
    url: https://templates-awx-awx.apps.omachace.phw9.p1.openshiftapps.com/#/jobs/playbook/99
  conditions:
  - lastTransitionTime: "2023-07-18T14:18:02Z"
    message: ""
    reason: ""
    status: "False"
    type: Failure
  - ansibleResult:
      changed: 0
      completion: 2023-07-19T06:50:29.120353
      failures: 0
      ok: 3
      skipped: 1
    lastTransitionTime: "2023-07-18T14:17:54Z"
    message: Awaiting next reconciliation
    reason: Successful
    status: "True"
    type: Running
  - lastTransitionTime: "2023-07-19T06:50:29Z"
    message: Last reconciliation succeeded
    reason: Successful
    status: "True"
    type: Successful
  isFinished: true
  k8sJob:
    created: true
    env:
      secretNamespacedName: myawx/awxsecret
      templateName: successdemo
      verifySSL: false
    message: |-
      Monitor the job.batch status for more details with the following commands:
      'kubectl -n myawx get job.batch/successdemo'
      'kubectl -n myawx describe job.batch/successdemo'
      'kubectl -n myawx logs -f job.batch/successdemo'
    namespacedName: myawx/successdemo
  message: This job instance is already running or has reached its end state.

15 changes: 15 additions & 0 deletions resource_customizations/tower.ansible.com/AnsibleJob/testdata/progressing_nostatus.yaml
@@ -0,0 +1,15 @@
apiVersion: tower.ansible.com/v1alpha1
kind: AnsibleJob
metadata:
  creationTimestamp: "2023-07-18T14:11:51Z"
  generation: 1
  labels:
    tower_job_id: "98"
  name: faildemo
  namespace: myawx
  resourceVersion: "25464236"
  uid: 835f35de-e437-4d32-accf-48f39c85bef2
spec:
  job_template_name: faildemo
  tower_auth_secret: awxsecret
status: {}
@@ -0,0 +1,74 @@
apiVersion: v1
items:
- apiVersion: kubevirt.io/v1
  kind: VirtualMachineInstance
  metadata:
    creationTimestamp: "2021-09-14T23:39:01Z"
    finalizers:
    - kubevirt.io/virtualMachineControllerFinalize
    - foregroundDeleteVirtualMachine
    generation: 1
    labels:
      kubevirt.io/domain: testvm
      kubevirt.io/size: small
    name: testvm
    namespace: default
    ownerReferences:
    - apiVersion: kubevirt.io/v1
      blockOwnerDeletion: true
      controller: true
      kind: VirtualMachine
      name: testvm
      uid: 40e44180-3dfb-4813-ace6-9f2d2c76c326
    resourceVersion: "602212"
    uid: 18648b62-36b6-4b86-bd16-a288aa132203
  spec:
    domain:
      cpu:
        cores: 1
        sockets: 1
        threads: 1
      devices:
        disks:
        - disk:
            bus: virtio
          name: containerdisk
        - disk:
            bus: virtio
          name: cloudinitdisk
        interfaces:
        - masquerade: {}
          name: default
      features:
        acpi:
          enabled: true
      firmware:
        uuid: 5a9fc181-957e-5c32-9e5a-2de5e9673531
      machine:
        type: q35
      resources:
        requests:
          cpu: 100m
          memory: 64M
    networks:
    - name: default
      pod: {}
    volumes:
    - containerDisk:
        image: quay.io/kubevirt/cirros-container-disk-demo
        imagePullPolicy: Always
      name: containerdisk
    - cloudInitNoCloud:
        userDataBase64: SGkuXG4=
      name: cloudinitdisk
  status:
    guestOSInfo: {}
    phase: Pending
    phaseTransitionTimestamps:
    - phase: Pending
      phaseTransitionTimestamp: "2021-09-14T23:39:01Z"
    virtualMachineRevisionName: revision-start-vm-40e44180-3dfb-4813-ace6-9f2d2c76c326-6
kind: List
metadata:
  resourceVersion: ""
  selfLink: ""
55 changes: 55 additions & 0 deletions resource_customizations/tower.ansible.com/AnsibleJob/testdata/progressing_started.yaml
@@ -0,0 +1,55 @@
apiVersion: tower.ansible.com/v1alpha1
kind: AnsibleJob
metadata:
  creationTimestamp: "2023-07-19T12:23:55Z"
  generation: 1
  labels:
    tower_job_id: "100"
  name: myjob
  namespace: myawx
  resourceVersion: "26062585"
  uid: 62f99d2c-8aba-443f-8432-275cce336c26
spec:
  job_template_name: createdns
  tower_auth_secret: awxsecret
status:
  ansibleJobResult:
    changed: true
    failed: false
    status: pending
    url: https://templates-awx-awx.apps.omachace.phw9.p1.openshiftapps.com/#/jobs/playbook/100
  conditions:
  - lastTransitionTime: "2023-07-19T12:24:03Z"
    message: ""
    reason: ""
    status: "False"
    type: Failure
  - ansibleResult:
      changed: 1
      completion: 2023-07-19T12:24:09.226155
      failures: 0
      ok: 6
      skipped: 2
    lastTransitionTime: "2023-07-19T12:23:55Z"
    message: Awaiting next reconciliation
    reason: Successful
    status: "True"
    type: Running
  - lastTransitionTime: "2023-07-19T12:24:09Z"
    message: Last reconciliation succeeded
    reason: Successful
    status: "True"
    type: Successful
  k8sJob:
    created: true
    env:
      secretNamespacedName: myawx/awxsecret
      templateName: createdns
      verifySSL: false
    message: |-
      Monitor the job.batch status for more details with the following commands:
      'kubectl -n myawx get job.batch/myjob'
      'kubectl -n myawx describe job.batch/myjob'
      'kubectl -n myawx logs -f job.batch/myjob'
    namespacedName: myawx/myjob
  message: This job instance is already running or has reached its end state.