GitHub Actions / Repro Test Results
failed
Jul 10, 2024 in 0s
2 fail in 13m 8s

2 tests   0 ✅   13m 8s ⏱️
1 suites  0 💤
1 files   2 ❌
Results for commit 1cc1d88.
Annotations
github-actions / Repro Test Results
test_bit_repro_historical (test-venv.lib.python3.11.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility) failed
/opt/testing/checksum/test_report.xml [took 3m 22s]
Raw output
AssertionError: Checksums were not equal. The new checksums have been written to /scratch/tm70/repro-ci/experiments/access-om2-configs/dev-1deg_jra55_ryf_bgc/checksum/historical-3hr-checksum.json.
assert {'output': {'...ion': '1-0-0'} == {'output': {'...ion': '1-0-0'}
Omitting 1 identical items, use -vv to show
Differing items:
{'output': {'Advection of u': ['0', '-5944066163830149791'], 'Advection of v': ['0', '-3606245664043050147'], 'Meridional velocity': ['9051849634365276068', '7718829052070798169'], 'Thickness%depth_st': ['-436572698594795605'], ...}} != {'output': {'Advection of u': ['0', '-5605928921166945482'], 'Advection of v': ['0', '5862787896069588124'], 'Meridional velocity': ['9051849634365276068', '7816526066601570409'], 'Thickness%depth_st': ['-436572698594795605'], ...}}
Use -v to get more diff
self = <model_config_tests.test_bit_reproducibility.TestBitReproducibility object at 0x7ff2ba47bd10>
output_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-1deg_jra55_ryf_bgc')
control_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-1deg_jra55_ryf_bgc/base-experiment')
checksum_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-1deg_jra55_ryf_bgc/base-experiment/testing/checksum/historical-3hr-checksum.json')
    @pytest.mark.checksum
    def test_bit_repro_historical(
        self, output_path: Path, control_path: Path, checksum_path: Path
    ):
        """
        Test that a run reproduces historical checksums
        """
        # Setup checksum output directory
        # NOTE: The checksum output file is used as part of `repro-ci` workflows
        output_dir = output_path / "checksum"
        output_dir.mkdir(parents=True, exist_ok=True)
        checksum_output_file = output_dir / "historical-3hr-checksum.json"
        if checksum_output_file.exists():
            checksum_output_file.unlink()

        # Setup and run experiment
        exp = setup_exp(control_path, output_path, "test_bit_repro_historical")
        exp.model.set_model_runtime()
        exp.setup_and_run()

        assert exp.model.output_exists()

        # Check checksum against historical checksum file
        hist_checksums = None
        hist_checksums_schema_version = None

        if (
            not checksum_path.exists()
        ):  # AKA, if the config branch doesn't have a checksum, or the path is misconfigured
            hist_checksums_schema_version = exp.model.default_schema_version
        else:  # we can use the historic-3hr-checksum that is in the testing directory
            with open(checksum_path) as file:
                hist_checksums = json.load(file)

            # Parse checksums using the same version
            hist_checksums_schema_version = hist_checksums["schema_version"]

        checksums = exp.extract_checksums(schema_version=hist_checksums_schema_version)

        # Write out checksums to output file
        with open(checksum_output_file, "w") as file:
            json.dump(checksums, file, indent=2)

>       assert (
            hist_checksums == checksums
        ), f"Checksums were not equal. The new checksums have been written to {checksum_output_file}."
E AssertionError: Checksums were not equal. The new checksums have been written to /scratch/tm70/repro-ci/experiments/access-om2-configs/dev-1deg_jra55_ryf_bgc/checksum/historical-3hr-checksum.json.
E assert {'output': {'...ion': '1-0-0'} == {'output': {'...ion': '1-0-0'}
E
E Omitting 1 identical items, use -vv to show
E Differing items:
E {'output': {'Advection of u': ['0', '-5944066163830149791'], 'Advection of v': ['0', '-3606245664043050147'], 'Meridional velocity': ['9051849634365276068', '7718829052070798169'], 'Thickness%depth_st': ['-436572698594795605'], ...}} != {'output': {'Advection of u': ['0', '-5605928921166945482'], 'Advection of v': ['0', '5862787896069588124'], 'Meridional velocity': ['9051849634365276068', '7816526066601570409'], 'Thickness%depth_st': ['-436572698594795605'], ...}}
E Use -v to get more diff
../test-venv/lib/python3.11/site-packages/model_config_tests/test_bit_reproducibility.py:59: AssertionError
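Because the failing assertion writes the freshly generated checksums next to the experiment output, the quickest way to see which fields drifted is to diff that file against the historical checksum stored with the base experiment. Below is a minimal sketch (not part of the test suite), using the paths from the failure above and assuming the JSON layout {"schema_version": ..., "output": {field: [hash, ...]}} implied by the printed diff:

# Minimal sketch: list the fields whose checksums differ between the historical
# file and the newly written one. Paths come from the failure output above; the
# file layout is assumed from the printed diff.
import json
from pathlib import Path

base = Path("/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-1deg_jra55_ryf_bgc")
hist_file = base / "base-experiment/testing/checksum/historical-3hr-checksum.json"
new_file = base / "checksum/historical-3hr-checksum.json"

hist = json.loads(hist_file.read_text())["output"]
new = json.loads(new_file.read_text())["output"]

# Report fields that differ, including fields present in only one of the files.
for field in sorted(hist.keys() | new.keys()):
    if hist.get(field) != new.get(field):
        print(f"{field}:")
        print(f"  historical: {hist.get(field)}")
        print(f"  new:        {new.get(field)}")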
github-actions / Repro Test Results
test_restart_repro (test-venv.lib.python3.11.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility) failed
/opt/testing/checksum/test_report.xml [took 9m 44s]
Raw output
assert False
self = <model_config_tests.test_bit_reproducibility.TestBitReproducibility object at 0x7ff2ba4949d0>
output_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-1deg_jra55_ryf_bgc')
control_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-1deg_jra55_ryf_bgc/base-experiment')
    @pytest.mark.checksum
    def test_restart_repro(self, output_path: Path, control_path: Path):
        """
        Test that a run reproduces across restarts.
        """
        # First do two short (1 day) runs.
        exp_2x1day = setup_exp(control_path, output_path, "test_restart_repro_2x1day")

        # Reconfigure to a 1 day run.
        exp_2x1day.model.set_model_runtime(seconds=86400)

        # Now run twice.
        exp_2x1day.setup_and_run()
        exp_2x1day.force_qsub_run()

        # Now do a single 2 day run
        exp_2day = setup_exp(control_path, output_path, "test_restart_repro_2day")

        # Reconfigure
        exp_2day.model.set_model_runtime(seconds=172800)

        # Run once.
        exp_2day.setup_and_run()

        # Now compare the output between our two short and one long run.
        checksums_1d_0 = exp_2x1day.extract_checksums()
        checksums_1d_1 = exp_2x1day.extract_checksums(exp_2x1day.output001)

        checksums_2d = exp_2day.extract_checksums()

        # Use model specific comparision method for checksums
        model = exp_2day.model
        matching_checksums = model.check_checksums_over_restarts(
            long_run_checksum=checksums_2d,
            short_run_checksum_0=checksums_1d_0,
            short_run_checksum_1=checksums_1d_1,
        )

        if not matching_checksums:
            # Write checksums out to file
            with open(output_path / "restart-1d-0-checksum.json", "w") as file:
                json.dump(checksums_1d_0, file, indent=2)
            with open(output_path / "restart-1d-1-checksum.json", "w") as file:
                json.dump(checksums_1d_1, file, indent=2)
            with open(output_path / "restart-2d-0-checksum.json", "w") as file:
                json.dump(checksums_2d, file, indent=2)

>       assert matching_checksums
E assert False
../test-venv/lib/python3.11/site-packages/model_config_tests/test_bit_reproducibility.py:131: AssertionError
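When this test fails it dumps the three checksum sets it compared (restart-1d-0, restart-1d-1 and restart-2d-0) into the experiment output directory, so the mismatch can be inspected offline. The sketch below assumes the same {"output": {field: [hash, ...]}} layout as above and only lists fields where the two chained 1-day runs and the single 2-day run disagree; it does not reproduce the model-specific logic of check_checksums_over_restarts, which is the authoritative comparison.

# Minimal sketch for inspecting the checksum dumps left behind by test_restart_repro.
# The comparison is deliberately naive (assumption: the 2-day run's hashes should
# contain those of the two chained 1-day runs) and is intended for manual triage only.
import json
from pathlib import Path

out = Path("/scratch/tm70/repro-ci/experiments/access-om2-configs/dev-1deg_jra55_ryf_bgc")

def load_output(name: str) -> dict:
    return json.loads((out / name).read_text())["output"]

day1_first = load_output("restart-1d-0-checksum.json")
day1_second = load_output("restart-1d-1-checksum.json")
day2 = load_output("restart-2d-0-checksum.json")

# Flag fields where the chained 1-day hashes are not a subset of the 2-day hashes.
for field in sorted(day2):
    chained = day1_first.get(field, []) + day1_second.get(field, [])
    if not set(chained) <= set(day2[field]):
        print(f"{field}: 2-day run {day2[field]} vs chained 1-day runs {chained}")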