Merge branch 'switchToConfig' into main
tylergu committed Jun 24, 2022
2 parents 07b92c1 + 8bfaa2f commit 2062836
Showing 21 changed files with 4,942 additions and 183 deletions.
133 changes: 67 additions & 66 deletions acto.py
@@ -2,6 +2,7 @@
import os
import sys
import threading
from types import SimpleNamespace
import kubernetes
import yaml
import time
@@ -12,6 +13,7 @@
import logging
import importlib
import traceback
import tempfile

from common import *
from exception import UnknownDeployMethodError
@@ -22,6 +24,7 @@
from runner import Runner
from checker import Checker
from snapshot import EmptySnapshot
from ssa.analysis import analyze

CONST = CONST()
random.seed(0)
@@ -249,32 +252,71 @@ def run_trial(self, trial_dir: str, num_mutation: int = 10) -> Tuple[ErrorResult
class Acto:

def __init__(self,
seed_file,
deploy: Deploy,
workdir_path,
crd_name,
preload_images_,
custom_fields_src,
helper_crd: str,
workdir_path: str,
operator_config: OperatorConfig,
preload_images_: list,
context_file: str,
analysis_result: str,
helper_crd: str,
num_workers: int,
dryrun: bool,
mount: list = None) -> None:
try:
with open(seed_file, 'r') as cr_file:
with open(operator_config.seed_custom_resource, 'r') as cr_file:
self.seed = yaml.load(cr_file, Loader=yaml.FullLoader)
except:
logging.error('Failed to read seed yaml, aborting')
quit()

if operator_config.deploy.method == 'HELM':
deploy = Deploy(DeployMethod.HELM, operator_config.deploy.file,
operator_config.deploy.init).new()
elif operator_config.deploy.method == 'YAML':
deploy = Deploy(DeployMethod.YAML, operator_config.deploy.file,
operator_config.deploy.init).new()
elif operator_config.deploy.method == 'KUSTOMIZE':
deploy = Deploy(DeployMethod.KUSTOMIZE, operator_config.deploy.file,
operator_config.deploy.init).new()
else:
raise UnknownDeployMethodError()

self.deploy = deploy
self.crd_name = crd_name
self.operator_config = operator_config
self.crd_name = operator_config.crd_name
self.workdir_path = workdir_path
self.images_archive = os.path.join(workdir_path, 'images.tar')
self.num_workers = num_workers
self.dryrun = dryrun
self.snapshots = []

self.__learn(context_file=context_file, helper_crd=helper_crd)

# Add additional preload images from arguments
if preload_images_ != None:
self.context['preload_images'].update(preload_images_)

# Apply custom fields
self.input_model = InputModel(self.context['crd']['body'], num_workers, mount)
self.input_model.initialize(self.seed)
if operator_config.custom_fields != None:
module = importlib.import_module(operator_config.custom_fields)
for custom_field in module.custom_fields:
self.input_model.apply_custom_field(custom_field)

# Build an archive to be preloaded
if len(self.context['preload_images']) > 0:
logging.info('Creating preload images archive')
# first make sure images are present locally
for image in self.context['preload_images']:
subprocess.run(['docker', 'pull', image])
subprocess.run(['docker', 'image', 'save', '-o',
self.images_archive] + list(self.context['preload_images']))

# Generate test cases
self.test_plan = self.input_model.generate_test_plan()
with open(os.path.join(self.workdir_path, 'test_plan.json'), 'w') as plan_file:
json.dump(self.test_plan, plan_file, cls=ActoEncoder, indent=6)

def __learn(self, context_file, helper_crd):
if os.path.exists(context_file):
with open(context_file, 'r') as context_fin:
self.context = json.load(context_fin)
@@ -291,44 +333,21 @@ def __init__(self,
break
apiclient = kubernetes_client('learn')
runner = Runner(self.context, 'learn', 'learn')
runner.run_without_collect(seed_file)
runner.run_without_collect(self.operator_config.seed_custom_resource)

update_preload_images(self.context)
process_crd(self.context, apiclient, 'learn', self.crd_name, helper_crd)
kind_delete_cluster('learn')

with tempfile.TemporaryDirectory() as project_src:
subprocess.run(['git', 'clone', self.operator_config.github_link, project_src])
subprocess.run(['git', '-C', project_src, 'checkout', self.operator_config.commit])
self.context['analysis_result'] = analyze(project_src,
self.operator_config.seedType.type,
self.operator_config.seedType.package)
with open(context_file, 'w') as context_fout:
json.dump(self.context, context_fout, cls=ActoEncoder)

# Add additional preload images from arguments
if preload_images_ != None:
self.context['preload_images'].update(preload_images_)

# Build an archive to be preloaded
if len(self.context['preload_images']) > 0:
logging.info('Creating preload images archive')
# first make sure images are present locally
for image in self.context['preload_images']:
subprocess.run(['docker', 'pull', image])
subprocess.run(['docker', 'image', 'save', '-o',
self.images_archive] + list(self.context['preload_images']))

if analysis_result != None:
with open(analysis_result, 'r') as analysis_file:
self.context['analysis_result'] = json.load(analysis_file)

# Apply custom fields
self.input_model = InputModel(self.context['crd']['body'], num_workers, mount)
self.input_model.initialize(self.seed)
if custom_fields_src != None:
module = importlib.import_module(custom_fields_src)
for custom_field in module.custom_fields:
self.input_model.apply_custom_field(custom_field)

# Generate test cases
self.test_plan = self.input_model.generate_test_plan()
with open(os.path.join(self.workdir_path, 'test_plan.json'), 'w') as plan_file:
json.dump(self.test_plan, plan_file, cls=ActoEncoder, indent=6)

def run(self):
threads = []
for i in range(self.num_workers):
@@ -382,7 +401,7 @@ def thread_excepthook(args):

parser = argparse.ArgumentParser(
description='Automatic, Continuous Testing for k8s/openshift Operators')
parser.add_argument('--seed', '-s', dest='seed', required=True, help="seed CR file")
parser.add_argument('--config', '-c', dest='config', help='Operator config path')
deploy_method = parser.add_mutually_exclusive_group(required=True)
deploy_method.add_argument('--operator',
'-o',
@@ -398,10 +417,6 @@ def thread_excepthook(args):
dest='kustomize',
required=False,
help='Path of folder with kustomize')
parser.add_argument('--init',
dest='init',
required=False,
help='Path of init yaml file (deploy before operator)')
parser.add_argument('--duration',
'-d',
dest='duration',
@@ -411,19 +426,10 @@
dest='preload_images',
nargs='*',
help='Docker images to preload into Kind cluster')
parser.add_argument('--crd-name',
dest='crd_name',
help='Name of CRD to use, required if there are multiple CRDs')
# Temporary solution before integrating controller-gen
parser.add_argument('--helper-crd',
dest='helper_crd',
help='generated CRD file that helps with the input generation')
parser.add_argument('--custom-fields',
dest='custom_fields',
help='Python source file containing a list of custom fields')
parser.add_argument('--analysis-result',
dest='analysis_result',
help='JSON file resulted from the code analysis')
parser.add_argument('--context', dest='context', help='Cached context data')
parser.add_argument('--num-workers',
dest='num_workers',
@@ -438,6 +444,7 @@
args = parser.parse_args()

os.makedirs(workdir_path, exist_ok=True)
# Setting up log infra
logging.basicConfig(
filename=os.path.join(workdir_path, 'test.log'),
level=logging.DEBUG,
@@ -452,7 +459,10 @@
sys.excepthook = handle_excepthook
threading.excepthook = thread_excepthook

with open(args.config, 'r') as config_file:
config = json.load(config_file, object_hook=lambda d: SimpleNamespace(**d))
logging.info('Acto started with [%s]' % sys.argv)
logging.info('Operator config: %s', config)

# Preload frequently used images to avoid ImagePullBackOff
if args.preload_images:
@@ -462,21 +472,12 @@
if args.duration != None:
signal.signal(signal.SIGALRM, timeout_handler)
signal.alarm(int(args.duration) * 60 * 60)
if args.operator_chart:
deploy = Deploy(DeployMethod.HELM, args.operator_chart, args.init).new()
elif args.operator:
deploy = Deploy(DeployMethod.YAML, args.operator, args.init).new()
elif args.kustomize:
deploy = Deploy(DeployMethod.KUSTOMIZE, args.kustomize, args.init).new()
else:
raise UnknownDeployMethodError()

if args.context == None:
context_cache = os.path.join(os.path.dirname(args.seed), 'context.json')
context_cache = os.path.join(os.path.dirname(config.seed_custom_resource), 'context.json')
else:
context_cache = args.context

acto = Acto(args.seed, deploy, workdir_path, args.crd_name, args.preload_images,
args.custom_fields, args.helper_crd, context_cache, args.analysis_result,
acto = Acto(workdir_path, config, args.preload_images, context_cache, args.helper_crd,
args.num_workers, args.dryrun)
acto.run()
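
For orientation, the core of the new config-driven entry point reduces to the pattern below: the per-operator JSON is parsed with a SimpleNamespace object_hook so nested keys become attributes, and Acto.__init__ resolves the deploy method from the config instead of from CLI flags. This is a minimal standalone sketch, not the committed code verbatim; it assumes the data/cass-operator/config.json added later in this commit is present on disk.

import json
from types import SimpleNamespace

# Nested JSON objects become attribute-accessible namespaces,
# e.g. config.deploy.method and config.seedType.package,
# exactly as acto.py now does at startup.
with open('data/cass-operator/config.json', 'r') as config_file:
    config = json.load(config_file, object_hook=lambda d: SimpleNamespace(**d))

# Acto.__init__ dispatches on this value to build a Deploy instance.
assert config.deploy.method in ('HELM', 'YAML', 'KUSTOMIZE')
print(config.crd_name)              # 'cassandradatacenters.cassandra.datastax.com'
print(config.seed_custom_resource)  # 'data/cass-operator/cr.yaml'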
18 changes: 8 additions & 10 deletions checker.py
@@ -5,8 +5,6 @@
import re
import copy

from numpy import require

from common import *
from compare import CompareMethods
from snapshot import EmptySnapshot, Snapshot
@@ -58,7 +56,8 @@ def check_input(self, snapshot: Snapshot) -> RunResult:
if stderr.find('connection refused') != -1:
return ConnectionRefusedResult()

elif stdout.find('error') != -1 or stderr.find('error') != -1 or stderr.find('invalid') != -1:
elif stdout.find('error') != -1 or stderr.find('error') != -1 or stderr.find(
'invalid') != -1:
logging.info('Invalid input, reject mutation')
logging.info('STDOUT: ' + stdout)
logging.info('STDERR: ' + stderr)
@@ -267,8 +266,7 @@ def get_deltas(self, snapshot: Snapshot, prev_snapshot: Snapshot):
import traceback
import argparse

parser = argparse.ArgumentParser(
description='Standalone checker for Acto')
parser = argparse.ArgumentParser(description='Standalone checker for Acto')

parser.add_argument('--context', dest='context', required=True)
parser.add_argument('--analysis-file', dest='analysis_file', required=False)
@@ -302,7 +300,7 @@ def handle_excepthook(type, message, stack):

sys.excepthook = handle_excepthook

trial_dirs = glob.glob(args.testrun_dir+'/*')
trial_dirs = glob.glob(args.testrun_dir + '/*')
with open(args.context, 'r') as context_fin:
context = json.load(context_fin)
context['preload_images'] = set(context['preload_images'])
@@ -311,9 +309,7 @@ def handle_excepthook(type, message, stack):
with open(args.analysis_file, 'r') as analysis_file:
context['analysis_result'] = json.load(analysis_file)
else:
context['analysis_result'] = {
'paths': []
}
context['analysis_result'] = {'paths': []}

for path in context['analysis_result']['paths']:
path.pop(0)
@@ -350,7 +346,9 @@ def handle_excepthook(type, message, stack):
operator_log = operator_log.read().splitlines()
snapshot = Snapshot(input, cli_result, system_state, operator_log)

result = checker.check(snapshot=snapshot, prev_snapshot=snapshots[-1], generation=generation)
result = checker.check(snapshot=snapshot,
prev_snapshot=snapshots[-1],
generation=generation)
snapshots.append(snapshot)

if isinstance(result, ConnectionRefusedResult):
35 changes: 33 additions & 2 deletions common.py
@@ -14,6 +14,37 @@
from test_case import TestCase


class DeployConfig:

def __init__(self, method: str, file: str, init: str) -> None:
self.method = method
self.file = file
self.init = init


class SeedType:

def __init__(self, type: str, package: str) -> None:
self.type = type
self.package = package


class OperatorConfig:

def __init__(self, github_link: str, commit: str, deploy: DeployConfig, crd_name: str,
custom_fields: str, context: str, seed_custom_resource: str, source_path: str,
seedType: SeedType) -> None:
self.github_link = github_link
self.commit = commit
self.deploy = deploy
self.crd_name = crd_name
self.custom_fields = custom_fields
self.context = context
self.seed_custom_resource = seed_custom_resource
self.source_path = source_path
self.seedType = seedType


class Diff:

def __init__(self, prev, curr, path) -> None:
@@ -178,8 +209,7 @@ def random_string(n: int):
return (''.join(random.choice(letters) for i in range(10)))


def save_result(trial_dir: str, trial_err: ErrorResult, num_tests: int,
trial_elapsed):
def save_result(trial_dir: str, trial_err: ErrorResult, num_tests: int, trial_elapsed):
result_dict = {}
try:
trial_num = '-'.join(trial_dir.split('-')[-2:])
@@ -240,6 +270,7 @@ def default(self, obj):
r"failed to sync(.)*status",
r"sync failed",
r"invalid",
r"Secret (.)* not found",
]

INVALID_INPUT_LOG_REGEX = [
16 changes: 16 additions & 0 deletions data/cass-operator/config.json
@@ -0,0 +1,16 @@
{
"github_link": "https://github.com/k8ssandra/cass-operator.git",
"commit": "241e71cdd32bd9f8a7e5c00d5427cdcaf9f55497",
"deploy": {
"method": "KUSTOMIZE",
"file": "github.com/k8ssandra/cass-operator/config/deployments/cluster?ref=v1.10.3",
"init": "data/cass-operator/init.yaml"
},
"crd_name": "cassandradatacenters.cassandra.datastax.com",
"custom_fields": "data.cass-operator.prune",
"seed_custom_resource": "data/cass-operator/cr.yaml",
"seedType": {
"type": "CassandraDatacenter",
"package": "github.com/k8ssandra/cass-operator/apis/cassandra/v1beta1"
}
}
16 changes: 16 additions & 0 deletions data/percona-server-mongodb-operator/config.json
@@ -0,0 +1,16 @@
{
"github_link": "https://github.com/percona/percona-server-mongodb-operator.git",
"commit": "54950f7e56cde893c4b36a061c6335598b84873d",
"deploy": {
"method": "YAML",
"file": "data/percona-server-mongodb-operator/cr.yaml",
"init": null
},
"crd_name": "perconaservermongodbs.psmdb.percona.com",
"custom_fields": "data.percona-server-mongodb-operator.prune",
"seed_custom_resource": "data/percona-server-mongodb-operator/cr.yaml",
"seedType": {
"type": "PerconaServerMongoDB",
"package": "github.com/percona/percona-server-mongodb-operator/pkg/apis/psmdb/v1"
}
}
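
These config files mirror the OperatorConfig, DeployConfig, and SeedType classes added to common.py field for field. Note that acto.py actually loads them through SimpleNamespace, so keys absent from the JSON (such as context and source_path, which OperatorConfig declares but neither file sets) simply never become attributes. Below is a hedged sketch of the equivalent explicit construction, with the missing fields filled in as None by assumption:

from common import DeployConfig, OperatorConfig, SeedType

config = OperatorConfig(
    github_link='https://github.com/percona/percona-server-mongodb-operator.git',
    commit='54950f7e56cde893c4b36a061c6335598b84873d',
    deploy=DeployConfig(method='YAML',
                        file='data/percona-server-mongodb-operator/cr.yaml',
                        init=None),
    crd_name='perconaservermongodbs.psmdb.percona.com',
    custom_fields='data.percona-server-mongodb-operator.prune',
    context=None,  # not present in the JSON; assumed None here
    seed_custom_resource='data/percona-server-mongodb-operator/cr.yaml',
    source_path=None,  # not present in the JSON; assumed None here
    seedType=SeedType(type='PerconaServerMongoDB',
                      package='github.com/percona/percona-server-mongodb-operator/pkg/apis/psmdb/v1'))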
1 change: 0 additions & 1 deletion data/rabbitmq-operator/context.json

This file was deleted.

