Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Debug PR #2028 #2039

Closed
wants to merge 15 commits into from
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -44,3 +44,6 @@ docker/generated.mk

# Vim backup files.
.*.swp

# Diff files from matplotlib
*-failed-diff.png
Binary file not shown.
5 changes: 5 additions & 0 deletions common/experiment_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,11 @@ def get_corpus_archive_name(cycle: int) -> str:
return get_cycle_filename('corpus-archive', cycle) + '.tar.gz'


def get_coverage_archive_name(cycle: int) -> str:
    """Returns a coverage archive name (JSON) for the given |cycle|."""
    return get_cycle_filename('coverage-archive', cycle) + '.json'


def get_stats_filename(cycle: int) -> str:
    """Returns a stats filename (JSON) for the given |cycle|."""
    return get_cycle_filename('stats', cycle) + '.json'
Expand Down
10 changes: 10 additions & 0 deletions common/fuzzer_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,12 @@ def dockerfiles(self):
def get_fuzz_target_binary(search_directory: str,
fuzz_target_name: str) -> Optional[str]:
"""Return target binary path."""
logs.info(f'Searching for fuzz target binary named {fuzz_target_name} under'
f' directory {search_directory}')
logs.info(f'Search diretory {os.path.abspath(search_directory)} exists: '
f'{os.path.exists(os.path.abspath(search_directory))}')
logs.info(f'list Search diretory {search_directory}: '
f'{os.listdir(search_directory)}')
if fuzz_target_name:
fuzz_target_binary = os.path.join(search_directory, fuzz_target_name)
if os.path.exists(fuzz_target_binary):
Expand All @@ -83,7 +89,11 @@ def get_fuzz_target_binary(search_directory: str,
if os.path.exists(default_fuzz_target_binary):
return default_fuzz_target_binary

logs.info('Searching for possible fuzz target in search directory: '
f'{search_directory}')
for root, _, files in os.walk(search_directory):
logs.info(f'Searching for possible fuzz target under subdir {root}: '
f'{files}')
if root == 'uninstrumented':
continue
for filename in files:
Expand Down
3 changes: 2 additions & 1 deletion experiment/build/gcb_build.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,8 @@ def _build(
# TODO(metzman): Refactor code so that local_build stores logs as well.
build_utils.store_build_logs(config_name, result)
if result.retcode != 0:
logs.error('%s failed.', command)
logs.error('%s failed. Return code: %d. Output: %s. Timedout: %s',
command, result.retcode, result.output, result.timed_out)
raise subprocess.CalledProcessError(result.retcode, command)
return result

Expand Down
4 changes: 3 additions & 1 deletion experiment/measurer/coverage_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,9 @@ def merge_profdata_files(self):

result = merge_profdata_files(files_to_merge, self.merged_profdata_file)
if result.retcode != 0:
logger.error('Profdata files merging failed.')
logger.error(
f'Profdata files merging failed for (fuzzer, benchmark): '
f'({self.fuzzer}, {self.benchmark}).')

def generate_coverage_summary_json(self):
"""Generates the coverage summary json from merged profdata file."""
Expand Down
26 changes: 25 additions & 1 deletion experiment/measurer/measure_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import collections
import gc
import glob
import gzip
import multiprocessing
import json
import os
Expand Down Expand Up @@ -614,10 +615,33 @@ def measure_snapshot_coverage( # pylint: disable=too-many-locals
# Generate profdata and transform it into json form.
snapshot_measurer.generate_coverage_information(cycle)

# Compress and save the exported profdata snapshot.
coverage_archive_zipped = os.path.join(
snapshot_measurer.trial_dir, 'coverage',
experiment_utils.get_coverage_archive_name(cycle) + '.gz')

coverage_archive_dir = os.path.dirname(coverage_archive_zipped)
if not os.path.exists(coverage_archive_dir):
os.makedirs(coverage_archive_dir)

with gzip.open(str(coverage_archive_zipped), 'wb') as compressed:
with open(snapshot_measurer.cov_summary_file, 'rb') as uncompressed:
# avoid saving warnings so we can direct import with pandas
compressed.write(uncompressed.readlines()[-1])

coverage_archive_dst = exp_path.filestore(coverage_archive_zipped)
if filestore_utils.cp(coverage_archive_zipped,
coverage_archive_dst,
expect_zero=False).retcode:
snapshot_logger.warning('Coverage not found for cycle: %d.', cycle)
return None

os.remove(coverage_archive_zipped) # no reason to keep this around

# Run crashes again, parse stacktraces and generate crash signatures.
crashes = snapshot_measurer.process_crashes(cycle)

# Get the coverage of the new corpus units.
# Get the coverage summary of the new corpus units.
branches_covered = snapshot_measurer.get_current_coverage()
fuzzer_stats_data = snapshot_measurer.get_fuzzer_stats(cycle)
snapshot = models.Snapshot(time=this_time,
Expand Down
5 changes: 4 additions & 1 deletion experiment/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,10 +177,13 @@ def run_fuzzer(max_total_time, log_filename):
input_corpus = environment.get('SEED_CORPUS_DIR')
output_corpus = os.environ['OUTPUT_CORPUS_DIR']
fuzz_target_name = environment.get('FUZZ_TARGET')
logs.info('all ENV VAR '
f'{[f"{key}: {value}" for key, value in os.environ.items()]}')
target_binary = fuzzer_utils.get_fuzz_target_binary(FUZZ_TARGET_DIR,
fuzz_target_name)
if not target_binary:
logs.error('Fuzz target binary not found.')
logs.error(f'Fuzz target binary {fuzz_target_name} not found under '
f'{FUZZ_TARGET_DIR}')
return

if max_total_time is None:
Expand Down
2 changes: 1 addition & 1 deletion fuzzers/libafl/builder.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ RUN wget https://gist.githubusercontent.com/tokatoka/26f4ba95991c6e3313999997633
RUN git clone https://github.com/AFLplusplus/LibAFL /libafl

# Checkout a current commit
RUN cd /libafl && git pull && git checkout f3433767bea0cc3d7ee3b4c08be138e61d20c468 || true
RUN cd /libafl && git pull && git checkout f856092f3d393056b010fcae3b086769377cba18 || true
# Note that due a nightly bug it is currently fixed to a known version on top!

# Compile libafl.
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ redis==4.3.4
rq==1.11.1
scikit-posthocs==0.7.0
scipy==1.9.2
seaborn==0.12.0
seaborn==0.13.2
sqlalchemy==1.4.41
protobuf==3.20.3

Expand Down
1 change: 0 additions & 1 deletion service/gcbrun_experiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
TRIGGER_COMMAND = '/gcbrun'
RUN_EXPERIMENT_COMMAND_STR = f'{TRIGGER_COMMAND} run_experiment.py '
SKIP_COMMAND_STR = f'{TRIGGER_COMMAND} skip'
# A DUMMY COMMENT


def get_comments(pull_request_number):
Expand Down
Loading