diff --git a/requirements.txt b/requirements.txt
index 2058f4df..5274e92a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -19,7 +19,7 @@ numpy-stl
 terminaltables
 tqdm
 wheel
-reframe-hpcs
+reframe-hpc
 pytest
 pytest-benchmark
 pytest-benchmark[histogram]
diff --git a/tests/.gitignore b/tests/.gitignore
index 486e723b..bd293c19 100644
--- a/tests/.gitignore
+++ b/tests/.gitignore
@@ -1,4 +1,5 @@
 output/
+perflogs/
 stage/
 reframe.log
 reframe.out
diff --git a/tests/base_tests.py b/tests/base_tests.py
index 5cc1b8b9..c43e1f77 100644
--- a/tests/base_tests.py
+++ b/tests/base_tests.py
@@ -9,6 +9,7 @@ from configuration.user_config import GPRMAX_ROOT_DIR
 
 PATH_TO_PYENV = os.path.join(".venv", "bin", "activate")
 
+
 @rfm.simple_test
 class CreatePyenvTest(rfm.RunOnlyRegressionTest):
     valid_systems = ["generic", "archer2:login"]
@@ -24,33 +25,75 @@ class CreatePyenvTest(rfm.RunOnlyRegressionTest):
 
     @sanity_function
     def check_requirements_installed(self):
+        """
+        Check packages successfully installed from requirements.txt
+        Check gprMax installed successfully and no other errors thrown
+        """
         return sn.assert_found(r"Successfully installed (?!gprMax)", self.stdout, "Failed to install requirements") \
             and sn.assert_found(r"Successfully installed gprMax", self.stdout, "Failed to install gprMax") \
             and sn.assert_not_found(r"finished with status 'error'", self.stdout) \
             and sn.assert_not_found(r"ERROR:", self.stderr)
-    
+
 
 class GprmaxBaseTest(rfm.RunOnlyRegressionTest):
     valid_systems = ["archer2:compute"]
     valid_prog_environs = ["PrgEnv-cray"]
-    executable = "python -m gprMax --log-level 25"
+    executable = "time -p python -m gprMax --log-level 25"
     exclusive_access = True
-    
+
     @run_after("init")
     def setup_omp(self):
-        self.env_vars = {
-            "OMP_NUM_THREADS": str(self.num_cpus_per_task)
-        }
+        """Set OMP_NUM_THREADS environment variable from num_cpus_per_task"""
+        self.env_vars["OMP_NUM_THREADS"] = self.num_cpus_per_task
 
     @run_after("init")
     def inject_dependencies(self):
+        """Test depends on the Python virtual environment building correctly"""
         self.depends_on("CreatePyenvTest", udeps.by_env)
 
     @require_deps
-    def set_sourcesdir(self, CreatePyenvTest):
+    def get_pyenv_path(self, CreatePyenvTest):
+        """Add prerun command to load the built Python environment"""
         path_to_pyenv = os.path.join(CreatePyenvTest(part="login").stagedir, PATH_TO_PYENV)
-        self.prerun_cmds = [f"source {path_to_pyenv}"]
+        self.prerun_cmds.append(f"source {path_to_pyenv}")
 
     @sanity_function
     def test_simulation_complete(self):
-        return sn.assert_found(r"=== Simulation completed in ", self.stdout)
\ No newline at end of file
+        """Check simulation completed successfully"""
+        # TODO: Check for correctness/regression rather than just completing
+        return sn.assert_found(r"=== Simulation completed in ", self.stdout)
+
+    @performance_function('s', perf_key='run_time')
+    def extract_run_time(self):
+        """Extract total runtime"""
+        return sn.extractsingle(
+            r'real\s+(?P<run_time>\S+)',
+            self.stderr,
+            "run_time",
+            float
+        )
+
+    @performance_function('s', perf_key='simulation_time')
+    def extract_simulation_time(self):
+        """Extract simulation time reported by gprMax"""
+
+        # sn.extractall throws an error if a group has value None.
+        # Therefore have to handle the < 1 min and >= 1 min cases separately.
+        if sn.extractsingle(r"=== Simulation completed in \S+ (?P<case>minute|seconds)", self.stdout, "case") == "minute":
+            simulation_time = sn.extractall(
+                r"=== Simulation completed in (?P<minutes>\S+) minutes? and (?P<seconds>\S+) seconds =*",
+                self.stdout,
+                ["minutes", "seconds"],
+                float
+            )
+            minutes = simulation_time[0][0]
+            seconds = simulation_time[0][1]
+        else:
+            minutes = 0
+            seconds = sn.extractsingle(
+                r"=== Simulation completed in (?P<seconds>\S+) seconds =*",
+                self.stdout,
+                "seconds",
+                float
+            )
+        return minutes * 60 + seconds
diff --git a/tests/configuration/archer2_settings.py b/tests/configuration/archer2_settings.py
index e8c2e8fd..5135b43a 100644
--- a/tests/configuration/archer2_settings.py
+++ b/tests/configuration/archer2_settings.py
@@ -81,7 +81,8 @@ site_configuration = {
                     'type': 'file',
                     'name': 'reframe_perf.out',
                     'level': 'info',
-                    'format': '[%(asctime)s] %(check_info)s: %(check_perf_var)s=%(check_perf_value)s (ref=%(check_perf_ref)s;l=%(check_perf_lower_thres)s;u=%(check_perf_upper_thres)s)) %(check_perf_unit)s',
+                    'format': '[%(asctime)s] %(check_info)s %(check_perfvalues)s',
+                    'format_perfvars': '| %(check_perf_var)s: %(check_perf_value)s %(check_perf_unit)s (r: %(check_perf_ref)s l: %(check_perf_lower_thres)s u: %(check_perf_upper_thres)s) ',
                     'append': True
                 },
                 {
@@ -91,12 +92,9 @@ site_configuration = {
                     'format': (
                         '%(check_job_completion_time)s|reframe %(version)s|'
                         '%(check_info)s|jobid=%(check_jobid)s|'
-                        '%(check_perf_var)s=%(check_perf_value)s|'
-                        'ref=%(check_perf_ref)s '
-                        '(l=%(check_perf_lower_thres)s, '
-                        'u=%(check_perf_upper_thres)s)|'
-                        '%(check_perf_unit)s'
+                        '%(check_perfvalues)s'
                     ),
+                    'format_perfvars': '%(check_perf_var)s|%(check_perf_value)s|%(check_perf_unit)s|',
                     'append': True
                 }
             ]
diff --git a/tests/job_scripts/archer2_tests.slurm b/tests/job_scripts/archer2_tests.slurm
new file mode 100644
index 00000000..7390ca96
--- /dev/null
+++ b/tests/job_scripts/archer2_tests.slurm
@@ -0,0 +1,17 @@
+#!/bin/bash
+#SBATCH --job-name=gprMax-tests
+#SBATCH --time=24:0:0
+#SBATCH --ntasks=1
+#SBATCH --partition=serial
+#SBATCH --qos=serial
+
+# Set the number of threads to 1
+# This prevents any threaded system libraries from automatically
+# using threading.
+export OMP_NUM_THREADS=1
+
+source ../.venv/bin/activate
+
+reframe -C configuration/archer2_settings.py -c . -r --performance-report
+
+sacct --format=JobID,State,Submit,Start,End,Elapsed,NodeList --units=M -j $SLURM_JOBID
\ No newline at end of file
diff --git a/tests/reframe_tests.py b/tests/reframe_tests.py
index e474930a..362e6c39 100644
--- a/tests/reframe_tests.py
+++ b/tests/reframe_tests.py
@@ -1,17 +1,14 @@
-from pathlib import Path
-
 import reframe as rfm
 from reframe.core.builtins import parameter
 
 from base_tests import GprmaxBaseTest
-from utilities.data import get_data_from_h5_file
 
 
-"""ReFrame tests for taskfarm functionality
+"""ReFrame tests for benchmarking and basic functionality
 
 Usage:
     cd gprMax/tests
-    reframe -C configuraiton/{CONFIG_FILE} -c test_mpi.py -r
+    reframe -C configuration/{CONFIG_FILE} -c reframe_tests.py -c base_tests.py -r
 
 """
 
@@ -47,3 +44,29 @@ class BasicModelsTest(GprmaxBaseTest):
         self.postrun_cmds = [f"python -m toolboxes.Plotting.plot_Ascan -save {output_file}"]
         self.keep_files = [input_file, output_file, f"{self.model}.pdf"]
 
+
+@rfm.simple_test
+class BenchmarkTest(GprmaxBaseTest):
+
+    num_tasks = 1
+    omp_threads = parameter([1, 2, 4, 8, 16, 32, 64, 128])
+    domain = parameter([0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8])
+    time_limit = "4h"
+
+    @run_after("init")
+    def setup_omp(self):
+        self.num_cpus_per_task = self.omp_threads
+        super().setup_omp()
+
+    @run_after("init")
+    def create_model_file(self):
+        input_file = "benchmark_model.in"
+        new_input_file = f"benchmark_model_{self.domain}.in"
+
+        self.prerun_cmds.append(f"sed -e 's/\$domain/{self.domain}/g' -e 's/\$src/{self.domain/2}/g' {input_file} > {new_input_file}")
+        self.executable_opts = [new_input_file]
+        self.keep_files = [new_input_file]
+
+    @run_after("init")
+    def set_cpu_freq(self):
+        self.env_vars["SLURM_CPU_FREQ_REQ"] = 2250000
diff --git a/tests/src/benchmark_model.in b/tests/src/benchmark_model.in
new file mode 100644
index 00000000..f16b77e1
--- /dev/null
+++ b/tests/src/benchmark_model.in
@@ -0,0 +1,7 @@
+#title: Benchmark model
+#domain: $domain $domain $domain
+#dx_dy_dz: 0.001 0.001 0.001
+#time_window: 3e-9
+
+#waveform: gaussiandotnorm 1 900e6 myWave
+#hertzian_dipole: x $src $src $src myWave