Add benchmarking tests to reframe test suite

This commit is contained in:
nmannall
2024-01-18 11:58:08 +00:00
Parent 8a0fe71744
Commit 5972fbff4c
7 changed files with 110 additions and 21 deletions

requirements.txt

@@ -19,7 +19,7 @@ numpy-stl
 terminaltables
 tqdm
 wheel
-reframe-hpcs
+reframe-hpc
 pytest
-pytest-benchmark
+pytest-benchmark[histogram]

tests/.gitignore vendored

@@ -1,4 +1,5 @@
 output/
+perflogs/
 stage/
 reframe.log
 reframe.out

tests/base_tests.py

@@ -9,6 +9,7 @@ from configuration.user_config import GPRMAX_ROOT_DIR
 PATH_TO_PYENV = os.path.join(".venv", "bin", "activate")

+
 @rfm.simple_test
 class CreatePyenvTest(rfm.RunOnlyRegressionTest):
     valid_systems = ["generic", "archer2:login"]
@@ -24,33 +25,75 @@ class CreatePyenvTest(rfm.RunOnlyRegressionTest):
     @sanity_function
     def check_requirements_installed(self):
         """
-        Check packages successfully installed from requirements.txt
+        Check gprMax installed successfully and no other errors thrown
         """
         return sn.assert_found(r"Successfully installed (?!gprMax)", self.stdout, "Failed to install requirements") \
             and sn.assert_found(r"Successfully installed gprMax", self.stdout, "Failed to install gprMax") \
             and sn.assert_not_found(r"finished with status 'error'", self.stdout) \
             and sn.assert_not_found(r"ERROR:", self.stderr)


 class GprmaxBaseTest(rfm.RunOnlyRegressionTest):
     valid_systems = ["archer2:compute"]
     valid_prog_environs = ["PrgEnv-cray"]
-    executable = "python -m gprMax --log-level 25"
+    executable = "time -p python -m gprMax --log-level 25"
     exclusive_access = True

     @run_after("init")
     def setup_omp(self):
-        self.env_vars = {
-            "OMP_NUM_THREADS": str(self.num_cpus_per_task)
-        }
+        """Set OMP_NUM_THREADS environment variable from num_cpus_per_task"""
+        self.env_vars["OMP_NUM_THREADS"] = self.num_cpus_per_task

     @run_after("init")
     def inject_dependencies(self):
+        """Test depends on the Python virtual environment building correctly"""
         self.depends_on("CreatePyenvTest", udeps.by_env)

     @require_deps
-    def set_sourcesdir(self, CreatePyenvTest):
+    def get_pyenv_path(self, CreatePyenvTest):
+        """Add prerun command to load the built Python environment"""
         path_to_pyenv = os.path.join(CreatePyenvTest(part="login").stagedir, PATH_TO_PYENV)
-        self.prerun_cmds = [f"source {path_to_pyenv}"]
+        self.prerun_cmds.append(f"source {path_to_pyenv}")

     @sanity_function
     def test_simulation_complete(self):
-        return sn.assert_found(r"=== Simulation completed in ", self.stdout)
+        """Check simulation completed successfully"""
+        # TODO: Check for correctness/regression rather than just completing
+        return sn.assert_found(r"=== Simulation completed in ", self.stdout)
+
+    @performance_function('s', perf_key='run_time')
+    def extract_run_time(self):
+        """Extract total runtime"""
+        return sn.extractsingle(
+            r'real\s+(?P<run_time>\S+)',
+            self.stderr,
+            "run_time",
+            float
+        )
+
+    @performance_function('s', perf_key='simulation_time')
+    def extract_simulation_time(self):
+        """Extract simulation time reported by gprMax"""
+        # sn.extractall throws an error if a group has value None,
+        # so the < 1 min and >= 1 min cases have to be handled separately.
+        if sn.extractsingle(r"=== Simulation completed in \S+ (?P<case>minute|seconds)", self.stdout, "case") == "minute":
+            simulation_time = sn.extractall(
+                r"=== Simulation completed in (?P<minutes>\S+) minutes? and (?P<seconds>\S+) seconds =*",
+                self.stdout,
+                ["minutes", "seconds"],
+                float
+            )
+            minutes = simulation_time[0][0]
+            seconds = simulation_time[0][1]
+        else:
+            minutes = 0
+            seconds = sn.extractsingle(
+                r"=== Simulation completed in (?P<seconds>\S+) seconds =*",
+                self.stdout,
+                "seconds",
+                float
+            )
+        return minutes * 60 + seconds
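The two extraction paths are easiest to see against concrete log lines. A minimal standalone sketch using plain re (the sample completion lines are assumed to match gprMax's output format; `time -p` writes its `real` line to stderr):

import re

# Assumed examples of the lines the performance functions parse
time_p_stderr = "real 118.32"
short_run = "=== Simulation completed in 42.7 seconds ======================="
long_run = "=== Simulation completed in 2 minutes and 5.3 seconds ==========="

# run_time: the 'real' line printed by `time -p`
print(float(re.search(r"real\s+(?P<run_time>\S+)", time_p_stderr)["run_time"]))  # 118.32

# simulation_time, < 1 minute case
m = re.search(r"=== Simulation completed in (?P<seconds>\S+) seconds =*", short_run)
print(float(m["seconds"]))  # 42.7

# simulation_time, >= 1 minute case
m = re.search(
    r"=== Simulation completed in (?P<minutes>\S+) minutes? and (?P<seconds>\S+) seconds =*",
    long_run,
)
print(float(m["minutes"]) * 60 + float(m["seconds"]))  # 125.3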

tests/configuration/archer2_settings.py

@@ -81,7 +81,8 @@ site_configuration = {
                 'type': 'file',
                 'name': 'reframe_perf.out',
                 'level': 'info',
-                'format': '[%(asctime)s] %(check_info)s: %(check_perf_var)s=%(check_perf_value)s (ref=%(check_perf_ref)s;l=%(check_perf_lower_thres)s;u=%(check_perf_upper_thres)s)) %(check_perf_unit)s',
+                'format': '[%(asctime)s] %(check_info)s %(check_perfvalues)s',
+                'format_perfvars': '| %(check_perf_var)s: %(check_perf_value)s %(check_perf_unit)s (r: %(check_perf_ref)s l: %(check_perf_lower_thres)s u: %(check_perf_upper_thres)s) ',
                 'append': True
             },
             {
@@ -91,12 +92,9 @@ site_configuration = {
                 'format': (
                     '%(check_job_completion_time)s|reframe %(version)s|'
                     '%(check_info)s|jobid=%(check_jobid)s|'
-                    '%(check_perf_var)s=%(check_perf_value)s|'
-                    'ref=%(check_perf_ref)s '
-                    '(l=%(check_perf_lower_thres)s, '
-                    'u=%(check_perf_upper_thres)s)|'
-                    '%(check_perf_unit)s'
+                    '%(check_perfvalues)s'
                 ),
+                'format_perfvars': '%(check_perf_var)s|%(check_perf_value)s|%(check_perf_unit)s|',
                 'append': True
             }
         ]
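Both handlers use printf-style placeholders: ReFrame expands %(check_perfvalues)s by applying 'format_perfvars' once per performance variable. A rough sketch of how the pipe-delimited line is assembled (field values here are invented for illustration; the real substitution happens inside ReFrame's logging layer):

format_perfvars = '%(check_perf_var)s|%(check_perf_value)s|%(check_perf_unit)s|'
perfvalues = ''.join(
    format_perfvars % {'check_perf_var': v, 'check_perf_value': x, 'check_perf_unit': u}
    for v, x, u in [('run_time', 118.32, 's'), ('simulation_time', 112.9, 's')]
)
line = (
    '%(check_job_completion_time)s|reframe %(version)s|'
    '%(check_info)s|jobid=%(check_jobid)s|'
    '%(check_perfvalues)s'
) % {
    'check_job_completion_time': '2024-01-18T12:00:00',  # illustrative values
    'version': '4.4.0',
    'check_info': 'BenchmarkTest',
    'check_jobid': '1234567',
    'check_perfvalues': perfvalues,
}
print(line)
# 2024-01-18T12:00:00|reframe 4.4.0|BenchmarkTest|jobid=1234567|run_time|118.32|s|simulation_time|112.9|s|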

(new file: Slurm batch script for submitting the test suite)

@@ -0,0 +1,17 @@
+#!/bin/bash
+
+#SBATCH --job-name=gprMax-tests
+#SBATCH --time=24:0:0
+#SBATCH --ntasks=1
+#SBATCH --partition=serial
+#SBATCH --qos=serial
+
+# Set the number of threads to 1
+# This prevents any threaded system libraries from automatically
+# using threading.
+export OMP_NUM_THREADS=1
+
+source ../.venv/bin/activate
+
+reframe -C configuration/archer2_settings.py -c . -r --performance-report
+sacct --format=JobID,State,Submit,Start,End,Elapsed,NodeList --units=M -j $SLURM_JOBID

tests/reframe_tests.py

@@ -1,17 +1,14 @@
 from pathlib import Path

 import reframe as rfm
+from reframe.core.builtins import parameter

 from base_tests import GprmaxBaseTest
 from utilities.data import get_data_from_h5_file

-"""ReFrame tests for taskfarm functionality
+"""ReFrame tests for benchmarking and basic functionality

 Usage:
     cd gprMax/tests
-    reframe -C configuraiton/{CONFIG_FILE} -c test_mpi.py -r
+    reframe -C configuration/{CONFIG_FILE} -c reframe_tests.py -c base_tests.py -r
 """
@@ -47,3 +44,29 @@ class BasicModelsTest(GprmaxBaseTest):
         self.postrun_cmds = [f"python -m toolboxes.Plotting.plot_Ascan -save {output_file}"]
         self.keep_files = [input_file, output_file, f"{self.model}.pdf"]


+@rfm.simple_test
+class BenchmarkTest(GprmaxBaseTest):
+    num_tasks = 1
+    omp_threads = parameter([1, 2, 4, 8, 16, 32, 64, 128])
+    domain = parameter([0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8])
+    time_limit = "4h"
+
+    @run_after("init")
+    def setup_omp(self):
+        """Set num_cpus_per_task from the omp_threads parameter, then defer to the base hook"""
+        self.num_cpus_per_task = self.omp_threads
+        super().setup_omp()
+
+    @run_after("init")
+    def create_model_file(self):
+        """Render the input template for this domain size"""
+        input_file = "benchmark_model.in"
+        new_input_file = f"benchmark_model_{self.domain}.in"
+        self.prerun_cmds.append(f"sed -e 's/\$domain/{self.domain}/g' -e 's/\$src/{self.domain/2}/g' {input_file} > {new_input_file}")
+        self.executable_opts = [new_input_file]
+        self.keep_files = [new_input_file]
+
+    @run_after("init")
+    def set_cpu_freq(self):
+        """Pin the requested CPU frequency (in kHz) for consistent benchmark timings"""
+        self.env_vars["SLURM_CPU_FREQ_REQ"] = 2250000

benchmark_model.in (new file)

@@ -0,0 +1,7 @@
+#title: Benchmark model
+#domain: $domain $domain $domain
+#dx_dy_dz: 0.001 0.001 0.001
+#time_window: 3e-9
+
+#waveform: gaussiandotnorm 1 900e6 myWave
+#hertzian_dipole: x $src $src $src myWave
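The $domain and $src placeholders are filled in by the sed command in create_model_file above; since $src is half the domain edge, the Hertzian dipole sits at the centre of the cubic domain. A rough Python equivalent of that substitution, for domain = 0.2:

# Equivalent of: sed -e 's/\$domain/0.2/g' -e 's/\$src/0.1/g' benchmark_model.in
template = open("benchmark_model.in").read()
domain = 0.2
rendered = template.replace("$domain", str(domain)).replace("$src", str(domain / 2))
print(rendered)
# '#domain:' becomes '0.2 0.2 0.2' and the dipole is placed at 0.1 0.1 0.1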