Skip to content

Commit

Permalink
lava_callback.py: Extend /checkout endpoint functionality
Browse files Browse the repository at this point in the history
Previously, the user could only use a reference nodeid and a custom commitid to
submit a custom checkout node.
Now we also allow the user to submit tree information and a jobfilter,
but all of this data will be validated against the config, e.g.
the user can only submit trees that exist in the pipeline configs,
job names that exist in the jobs: section, etc.

Signed-off-by: Denys Fedoryshchenko <[email protected]>
  • Loading branch information
nuclearcat committed Aug 2, 2024
1 parent c9eaa65 commit 33fc8ea
Show file tree
Hide file tree
Showing 2 changed files with 227 additions and 40 deletions.
151 changes: 124 additions & 27 deletions src/lava_callback.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from datetime import datetime, timedelta
from fastapi import FastAPI, HTTPException, Request, Header
from pydantic import BaseModel
from typing import Optional
import kernelci.api.helper
import kernelci.config
import kernelci.runtime.lava
Expand All @@ -38,8 +39,12 @@


class ManualCheckout(BaseModel):
    '''
    Request body for the /api/checkout endpoint.

    Either `nodeid` (plus `commit`) is supplied to reuse tree data from an
    existing node, or `url` + `branch` + `commit` describe the tree
    explicitly.  `jobfilter` optionally restricts which jobs are triggered.
    '''
    # The diff artifact duplicated the old required `nodeid: str` /
    # `commit: str` declarations; only the Optional form is kept.
    nodeid: Optional[str] = None
    url: Optional[str] = None
    branch: Optional[str] = None
    commit: Optional[str] = None
    jobfilter: Optional[list] = None


class JobRetry(BaseModel):
Expand Down Expand Up @@ -237,6 +242,9 @@ def validate_permissions(jwtoken, permission):


def find_parent_kind(node, api_helper, kind):
'''
Find parent node of a specific "kind" value
'''
parent_id = node.get('parent')
if not parent_id:
return None
Expand All @@ -248,6 +256,29 @@ def find_parent_kind(node, api_helper, kind):
return find_parent_kind(parent_node, api_helper, kind)


def find_tree(url, branch):
    '''
    Map a repository URL and branch to a tree name from the pipeline config.

    Returns the tree name if some tree in YAMLCFG['trees'] has the given
    URL *and* some entry in YAMLCFG['build_configs'] pairs that tree with
    the given branch; otherwise returns None.
    '''
    # A URL may appear under more than one tree name, so collect every
    # candidate instead of keeping only the last match (the original
    # loop overwrote `treename` and could miss a valid earlier tree).
    candidates = []
    for tree, data in YAMLCFG['trees'].items():
        if data.get('url') == url:
            print(f'Found tree {tree} with URL {url}')
            candidates.append(tree)

    if not candidates:
        return None

    for data in YAMLCFG['build_configs'].values():
        treename = data.get('tree')
        if treename in candidates and data.get('branch') == branch:
            print(f'Found branch {branch} for tree {treename}')
            return treename

    return None


@app.post('/api/jobretry')
async def jobretry(data: JobRetry, request: Request,
Authorization: str = Header(None)):
Expand Down Expand Up @@ -306,13 +337,62 @@ async def jobretry(data: JobRetry, request: Request,
return 'OK', 200


def get_jobfilter(node, api_helper):
    '''
    Build a jobfilter [kbuild name, test name] for the given node.

    Walks up the node hierarchy (via find_parent_kind) to locate the
    enclosing job and kbuild nodes.  Returns None if either cannot be
    found.

    NOTE: the original version returned HTTP-style tuples such as
    ('Job not found', 404) here; the caller assigns the result straight
    into the checkout node's jobfilter, so a truthy error tuple would
    have leaked into the API payload.  Returning None keeps the caller's
    `if jobfilter:` check meaningful.
    '''
    if node['kind'] != 'job':
        jobnode = find_parent_kind(node, api_helper, 'job')
        if not jobnode:
            return None
    else:
        jobnode = node

    kbuildnode = find_parent_kind(node, api_helper, 'kbuild')
    if not kbuildnode:
        return None

    kbuildname = kbuildnode['name']
    testname = jobnode['name']
    return [kbuildname, testname]


def is_valid_commit_string(commit):
    '''
    Check whether *commit* looks like a (possibly truncated) git SHA-1:
    between 7 and 40 lowercase hexadecimal characters.
    '''
    if not commit:
        return False
    hex_chars = set('0123456789abcdef')
    return 7 <= len(commit) <= 40 and set(commit) <= hex_chars


def is_job_exist(jobname):
    '''
    Return True if a job with the given name is declared in the
    pipeline configuration (YAMLCFG['jobs']), False otherwise.
    '''
    return any(
        jobdata.get('name') == jobname
        for jobdata in YAMLCFG['jobs'].values()
    )


@app.post('/api/checkout')
async def checkout(data: ManualCheckout, request: Request,
Authorization: str = Header(None)):
'''
API call to assist in regression bisecting by manually checking out
a specific commit on a specific branch of a specific tree, retrieved
from test results.
User either supplies a node ID to checkout, or a tree URL, branch and
commit hash. In the latter case, the tree name is looked up in the
configuration file.
'''
# Validate JWT token from Authorization header
jwtoken = Authorization
Expand All @@ -330,30 +410,47 @@ async def checkout(data: ManualCheckout, request: Request,
return 'No default API name set', 500
api_token = os.getenv('KCI_API_TOKEN')
api_helper = _get_api_helper(api_config_name, api_token)
node = api_helper.api.node.get(data.nodeid)
if not node:
return 'Node not found', 404
try:
treename = node['data']['kernel_revision']['tree']
treeurl = node['data']['kernel_revision']['url']
branch = node['data']['kernel_revision']['branch']
commit = node['data']['kernel_revision']['commit']
except KeyError:
return 'Node does not have kernel revision data', 400

if node['kind'] != 'job':
jobnode = find_parent_kind(node, api_helper, 'job')
if not jobnode:
return 'Job not found', 404
# if user set node - we retrieve all the tree data from it
if data.nodeid:
node = api_helper.api.node.get(data.nodeid)
# validate commit string
if not is_valid_commit_string(data.commit):
return 'Invalid commit format', 400
if not node:
return 'Node not found', 404
try:
treename = node['data']['kernel_revision']['tree']
treeurl = node['data']['kernel_revision']['url']
branch = node['data']['kernel_revision']['branch']
commit = data.commit
except KeyError:
return 'Node does not have kernel revision data', 400

jobfilter = get_jobfilter(node, api_helper)
else:
jobnode = node

kbuildnode = find_parent_kind(node, api_helper, 'kbuild')
if not kbuildnode:
return 'Kernel build not found', 404

kbuildname = kbuildnode['name']
testname = jobnode['name']
if not data.url or not data.branch or not data.commit:
return 'Missing tree URL, branch or commit', 400
if not is_valid_commit_string(data.commit):
return 'Invalid commit format', 400
treename = find_tree(data.url, data.branch)
if not treename:
return 'Tree not found', 404
treeurl = data.url
branch = data.branch
commit = data.commit

# validate jobfilter list
if data.jobfilter:
# to be on safe side restrict length of jobfilter to 8
if len(data.jobfilter) > 8:
return 'Too many jobs in jobfilter', 400
for jobname in data.jobfilter:
if not is_job_exist(jobname):
return f'Job {jobname} not found', 404
jobfilter = data.jobfilter
else:
jobfilter = None

# Now we can submit custom checkout node to the API
# Maybe add field who requested the checkout?
Expand All @@ -372,11 +469,11 @@ async def checkout(data: ManualCheckout, request: Request,
}
},
"timeout": checkout_timeout.isoformat(),
"jobfilter": [
kbuildname,
testname
],
}

if jobfilter:
node['jobfilter'] = jobfilter

r = api_helper.api.node.add(node)
if not r:
return 'Failed to submit checkout node', 500
Expand Down
116 changes: 103 additions & 13 deletions tools/kci-maintainer
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,17 @@ import argparse
import sys


API_ENDPOINTS = {
PIPELINE_ENDPOINTS = {
'staging': 'https://staging.kernelci.org:9100/',
'production': 'https://kernelci-pipeline.westus3.cloudapp.azure.com/'
}

API_ENDPOINTS = {
'staging': 'https://staging.kernelci.org:9000/',
'production': 'https://kernelci-api.westus3.cloudapp.azure.com/'
}

PIPELINE_URL = PIPELINE_ENDPOINTS['staging']
API_URL = API_ENDPOINTS['staging']

def get_token():
Expand All @@ -33,24 +40,107 @@ def get_token():
return f.read().strip()


def send_checkout_request(token, nodeid, commit):
url = API_URL + 'api/checkout'
def send_checkout_request1(token, nodeid, commit):
    '''
    1st way of sending checkout request
    nodeid + commit

    Prints and returns the raw response body; raises requests.HTTPError
    on a non-2xx status.
    '''
    endpoint = PIPELINE_URL + 'api/checkout'
    payload = json.dumps({'nodeid': nodeid, 'commit': commit})
    resp = requests.post(
        endpoint,
        headers={'Authorization': token, 'Content-Type': 'application/json'},
        data=payload,
    )
    resp.raise_for_status()
    print(resp.text)
    return resp.text


def send_checkout_request2(token, url, branch, commit, jobfilter):
    '''
    2nd way of sending checkout request
    repourl + branch + commit + jobfilter

    Prints and returns the raw response body; raises requests.HTTPError
    on a non-2xx status.
    '''
    # BUG FIX: the original code reassigned the `url` parameter to the
    # API endpoint (`url = PIPELINE_URL + 'api/checkout'`), so the
    # repository URL sent in the payload was silently replaced by the
    # endpoint URL.  Use a distinct name for the endpoint.
    endpoint = PIPELINE_URL + 'api/checkout'
    headers = {'Authorization': token, 'Content-Type': 'application/json'}
    data = {'url': url, 'branch': branch, 'commit': commit,
            'jobfilter': jobfilter}
    response = requests.post(endpoint, headers=headers, data=json.dumps(data))
    response.raise_for_status()
    print(response.text)
    return response.text

def send_jobretry_request(token, nodeid):
    '''
    Ask the pipeline to retry a job (test) identified by its node ID.

    Prints and returns the raw response body; raises requests.HTTPError
    on a non-2xx status.
    '''
    url = PIPELINE_URL + 'api/jobretry'
    headers = {'Authorization': token, 'Content-Type': 'application/json'}
    data = {'nodeid': nodeid}
    response = requests.post(url, headers=headers, data=json.dumps(data))
    # Consistency with the checkout helpers: surface HTTP errors instead
    # of silently printing an error body, and return the response text.
    response.raise_for_status()
    print(response.text)
    return response.text


def get_repo_info(repodir):
    '''
    Collect origin URL, HEAD commit and current branch from a local git
    repository.

    Returns a dict with 'url', 'commit' and 'branch' keys, or None if
    the directory does not exist or the git commands fail.
    '''
    import shlex

    if not os.path.exists(repodir):
        print('Repository directory does not exist')
        return None

    # Use `git -C <dir>` instead of os.chdir() so the caller's working
    # directory is not changed as a side effect, and quote the path in
    # case it contains spaces or shell metacharacters.
    git = f'git -C {shlex.quote(repodir)}'
    repoinfo = {}
    try:
        repoinfo['url'] = os.popen(f'{git} config --get remote.origin.url').read().strip()
        repoinfo['commit'] = os.popen(f'{git} rev-parse HEAD').read().strip()
        repoinfo['branch'] = os.popen(f'{git} rev-parse --abbrev-ref HEAD').read().strip()
    except Exception as e:
        print(f'Failed to get repository info: {e}')
        return None

    return repoinfo

def prepare_checkout(args):
    '''
    Validate the checkout-related CLI arguments and send the request.

    Two modes:
      * --nodeid + --commit: reference an existing node.
      * --repodir or (--repourl + --branch), plus --commit: describe the
        tree explicitly; --jobfilter optionally restricts triggered jobs.
    '''
    token = get_token()
    if not token:
        print('API token is required')
        return

    # Developer have several ways to provide checkout data
    # Always require commit ID
    # If node ID is not provided, we will try to get it:
    # 1. From repository developer pointed to
    #    (commit ID then can be also taken from there)
    # 2. From cli parameters

    if args.nodeid:
        if not args.commit:
            print('Commit ID is required')
            return
        return send_checkout_request1(token, args.nodeid, args.commit)

    if not args.commit:
        print('Commit ID is required')
        return

    # BUG FIX: initialize repoinfo so the `if not repoinfo` check below
    # cannot raise NameError when neither --repodir nor --repourl is given.
    repoinfo = None

    if args.repodir:
        repoinfo = get_repo_info(args.repodir)

    if args.repourl:
        if not args.branch:
            print('Branch name is required')
            return
        repoinfo = {'url': args.repourl, 'branch': args.branch}

    if not repoinfo:
        print('Failed to get repository info')
        return

    # argparse hands us a single string, while the API expects a list of
    # job names (a bare string would be validated per-character by the
    # server), so split on commas.
    jobfilter = args.jobfilter.split(',') if args.jobfilter else None

    if not jobfilter:
        # Just show warning, that lack of jobfilter will trigger all jobs
        print('Warning: jobfilter is not provided, all jobs will be triggered, this will'
              ' consume a lot of resources on kernelci.org')

    if args.verbose:
        print(f'Repository info: {repoinfo}')
        print(f'Job filter: {jobfilter}')

    send_checkout_request2(token, repoinfo['url'], repoinfo['branch'],
                           args.commit, jobfilter)

def main():
global API_URL
global PIPELINE_URL, API_URL
ap = argparse.ArgumentParser()
token = get_token()
if not token:
Expand All @@ -59,14 +149,20 @@ def main():
ap.add_argument('--api', help='API server to use', choices=API_ENDPOINTS.keys())
ap.add_argument('--checkout', action='store_true', help='Send checkout request')
ap.add_argument('--jobretry', action='store_true', help='Retry job(test) request')
ap.add_argument('-r', '--repodir', help='Local repository directory')
ap.add_argument('-u', '--repourl', help='Repository URL')
ap.add_argument('-b', '--branch', help='Branch name')
ap.add_argument('-f', '--jobfilter', help='Job filter')
ap.add_argument('-n', '--nodeid', help='Node ID')
ap.add_argument('-c', '--commit', help='Commit ID')
ap.add_argument('-v', '--verbose', action='store_true', help='Verbose output')

args = ap.parse_args()

if args.api:
PIPELINE_URL = PIPELINE_ENDPOINTS[args.api]
API_URL = API_ENDPOINTS[args.api]
if not API_URL:
if not PIPELINE_URL or not API_URL:
print('Invalid API server')
return

Expand All @@ -85,13 +181,7 @@ def main():
sys.exit(0)

if args.checkout:
if not args.nodeid:
print('Node ID is required')
return
if not args.commit:
print('Commit ID is required')
return
send_checkout_request(token, args.nodeid, args.commit)
prepare_checkout(args)
sys.exit(0)

ap.print_help()
Expand Down

0 comments on commit 33fc8ea

Please sign in to comment.