GitHub Actions / Repro Test Results
failed
Feb 18, 2025 in 0s
1 fail, 2 pass in 20m 12s
3 tests    2 ✅ passed     20m 12s ⏱️
1 suite    0 💤 skipped
1 file     1 ❌ failed
Results for commit b494782.
Annotations
github-actions / Repro Test Results
test_restart_repro (test-venv.lib.python3.10.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility) failed
/opt/testing/checksum/test_report.xml [took 10m 53s]
Raw output
assert False
self = <model_config_tests.test_bit_reproducibility.TestBitReproducibility object at 0x7f1fbc9ccf40>
output_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om3-configs/0121cc7827ddbd38cabd9b17022794ed70864144')
control_path = PosixPath('/scratch/tm70/repro-ci/experiments/access-om3-configs/0121cc7827ddbd38cabd9b17022794ed70864144/base-experiment')

    @pytest.mark.checksum_slow
    def test_restart_repro(self, output_path: Path, control_path: Path):
        """
        Test that a run reproduces across restarts.
        """
        # First do two short (1 day) runs.
        exp_2x1day = setup_exp(control_path, output_path, "test_restart_repro_2x1day")
        # Reconfigure to a 1 day run.
        exp_2x1day.model.set_model_runtime(seconds=DAY_IN_SECONDS)
        # Now run twice.
        exp_2x1day.setup_and_run()
        exp_2x1day.force_qsub_run()
        # Now do a single 2 day run.
        exp_2day = setup_exp(control_path, output_path, "test_restart_repro_2day")
        # Reconfigure.
        exp_2day.model.set_model_runtime(seconds=(2 * DAY_IN_SECONDS))
        # Run once.
        exp_2day.setup_and_run()
        # Now compare the output between our two short and one long run.
        checksums_1d_0 = exp_2x1day.extract_checksums()
        checksums_1d_1 = exp_2x1day.extract_checksums(exp_2x1day.output001)
        checksums_2d = exp_2day.extract_checksums()
        # Use model-specific comparison method for checksums.
        model = exp_2day.model
        matching_checksums = model.check_checksums_over_restarts(
            long_run_checksum=checksums_2d,
            short_run_checksum_0=checksums_1d_0,
            short_run_checksum_1=checksums_1d_1,
        )
        if not matching_checksums:
            # Write checksums out to file.
            with open(output_path / "restart-1d-0-checksum.json", "w") as file:
                json.dump(checksums_1d_0, file, indent=2)
            with open(output_path / "restart-1d-1-checksum.json", "w") as file:
                json.dump(checksums_1d_1, file, indent=2)
            with open(output_path / "restart-2d-0-checksum.json", "w") as file:
                json.dump(checksums_2d, file, indent=2)
>       assert matching_checksums
E       assert False

../test-venv/lib/python3.10/site-packages/model_config_tests/test_bit_reproducibility.py:203: AssertionError
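
When the comparison fails, the test dumps the three checksum sets it compared (restart-1d-0-checksum.json, restart-1d-1-checksum.json and restart-2d-0-checksum.json) into the experiment's output path shown above. Below is a minimal triage sketch for inspecting those files, assuming they are plain nested JSON of checksum values; diff_checksums is a hypothetical helper, and the authoritative pass/fail logic remains model.check_checksums_over_restarts, which may align the two 1-day segments against the 2-day run in a model-specific way, so some of the raw differences printed here can be expected.

import json
from pathlib import Path

def diff_checksums(a, b, path=""):
    """Recursively yield (path, value_a, value_b) for entries that differ."""
    if isinstance(a, dict) and isinstance(b, dict):
        for key in sorted(set(a) | set(b)):
            yield from diff_checksums(a.get(key), b.get(key), f"{path}/{key}")
    elif isinstance(a, list) and isinstance(b, list) and len(a) == len(b):
        for i, (x, y) in enumerate(zip(a, b)):
            yield from diff_checksums(x, y, f"{path}[{i}]")
    elif a != b:
        yield path, a, b

# Output path of this particular run, taken from the fixture values above.
output_path = Path(
    "/scratch/tm70/repro-ci/experiments/access-om3-configs/"
    "0121cc7827ddbd38cabd9b17022794ed70864144"
)

short_0 = json.loads((output_path / "restart-1d-0-checksum.json").read_text())
short_1 = json.loads((output_path / "restart-1d-1-checksum.json").read_text())
long_2d = json.loads((output_path / "restart-2d-0-checksum.json").read_text())

# Print every entry where the single 2-day run disagrees with either 1-day segment.
for label, short in (("1d-0", short_0), ("1d-1", short_1)):
    for where, long_value, short_value in diff_checksums(long_2d, short):
        print(f"{label}{where}: 2d={long_value!r} vs {label}={short_value!r}")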
Check notice on line 0 in .github
github-actions / Repro Test Results
3 tests found
There are 3 tests, see "Raw output" for the full list of tests.
Raw output
test-venv.lib.python3.10.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility ‑ test_bit_repro_historical
test-venv.lib.python3.10.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility ‑ test_bit_repro_repeat
test-venv.lib.python3.10.site-packages.model_config_tests.test_bit_reproducibility.TestBitReproducibility ‑ test_restart_repro