From 8e40fcaad311197ac1d3e669b86df6b08f018956 Mon Sep 17 00:00:00 2001 From: nmannall Date: Fri, 4 Oct 2024 17:27:29 +0100 Subject: [PATCH 01/31] Check for missing colon in input command --- gprMax/hash_cmds_file.py | 28 +++++++++++++++++++++++----- 1 file changed, 23 insertions(+), 5 deletions(-) diff --git a/gprMax/hash_cmds_file.py b/gprMax/hash_cmds_file.py index 372489d6..283cb50c 100644 --- a/gprMax/hash_cmds_file.py +++ b/gprMax/hash_cmds_file.py @@ -48,7 +48,9 @@ def process_python_include_code(inputfile, usernamespace): """ # Strip out any newline characters and comments that must begin with double hashes - inputlines = [line.rstrip() for line in inputfile if (not line.startswith("##") and line.rstrip("\n"))] + inputlines = [ + line.rstrip() for line in inputfile if (not line.startswith("##") and line.rstrip("\n")) + ] # Rewind input file in preparation for any subsequent reading function inputfile.seek(0) @@ -74,7 +76,8 @@ def process_python_include_code(inputfile, usernamespace): x += 1 if x == len(inputlines): logger.exception( - "Cannot find the end of the Python code " + "block, i.e. missing #end_python: command." + "Cannot find the end of the Python code " + + "block, i.e. missing #end_python: command." ) raise SyntaxError # Compile code for faster execution @@ -281,6 +284,15 @@ def check_cmd_names(processedlines, checkessential=True): lindex = 0 while lindex < len(processedlines): cmd = processedlines[lindex].split(":") + + # Check the command name and parameters were both found + if len(cmd) < 2: + logger.error( + f"Unable to identify command and parameters in '{processedlines[lindex].strip()}'." + " There must be a colon ':' between the command name and parameters." + ) + exit(1) + cmdname = cmd[0] cmdparams = cmd[1] @@ -289,7 +301,9 @@ def check_cmd_names(processedlines, checkessential=True): # are no parameters for a command, e.g. for #taguchi: if " " not in cmdparams[0] and len(cmdparams.strip("\n")) != 0: logger.exception( - "There must be a space between the command name " + "and parameters in " + processedlines[lindex] + "There must be a space between the command name " + + "and parameters in " + + processedlines[lindex] ) raise SyntaxError @@ -312,7 +326,9 @@ def check_cmd_names(processedlines, checkessential=True): if singlecmds[cmdname] is None: singlecmds[cmdname] = cmd[1].strip(" \t\n") else: - logger.exception("You can only have a single instance of " + cmdname + " in your model") + logger.exception( + "You can only have a single instance of " + cmdname + " in your model" + ) raise SyntaxError elif cmdname in multiplecmds: @@ -384,7 +400,9 @@ def parse_hash_commands(scene): for key, value in sorted(usernamespace.items()): if key != "__builtins__": uservars += f"{key}: {value}, " - logger.info(f"Constants/variables used/available for Python scripting: " + f"{{{uservars[:-2]}}}\n") + logger.info( + f"Constants/variables used/available for Python scripting: " + f"{{{uservars[:-2]}}}\n" + ) # Write a file containing the input commands after Python or include # file commands have been processed From 405af942726249d3e8fd7481f78cd3b722d81c84 Mon Sep 17 00:00:00 2001 From: nmannall Date: Thu, 10 Oct 2024 17:26:01 +0100 Subject: [PATCH 02/31] Ensure the same four component materials form the same ID. 
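Averaged materials derive their ID from the IDs of the four component
materials, so the same four materials encountered in a different order
previously produced a different ID and a duplicate averaged material.
Sorting the component names before joining makes the ID canonical. A
minimal sketch of the idea in plain Python (the patch makes the
equivalent change in Cython; the names here are illustrative):

    def averaged_id(component_ids):
        # Sorting first makes the ID independent of component order
        return "+".join(sorted(component_ids))

    assert averaged_id(["sand", "air", "water", "clay"]) == averaged_id(
        ["clay", "water", "air", "sand"]
    )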
--- gprMax/cython/yee_cell_build.pyx | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/gprMax/cython/yee_cell_build.pyx b/gprMax/cython/yee_cell_build.pyx index 17832691..ad75205e 100644 --- a/gprMax/cython/yee_cell_build.pyx +++ b/gprMax/cython/yee_cell_build.pyx @@ -53,17 +53,22 @@ cpdef void create_electric_average( G: FDTDGrid class describing a grid in a model. """ - # Make an ID composed of the names of the four materials that will be averaged - requiredID = (G.materials[numID1].ID + '+' + G.materials[numID2].ID + '+' + - G.materials[numID3].ID + '+' + G.materials[numID4].ID) + # Make an ID composed of the names of the four materials that will + # be averaged. Sort the names to ensure the same four component + # materials always form the same ID. + requiredID = "+".join( + sorted( + [ + G.materials[numID1].ID, + G.materials[numID2].ID, + G.materials[numID3].ID, + G.materials[numID4].ID, + ] + ) + ) # Check if this material already exists - tmp = requiredID.split('+') - material = [x for x in G.materials if - x.ID.count(tmp[0]) == requiredID.count(tmp[0]) and - x.ID.count(tmp[1]) == requiredID.count(tmp[1]) and - x.ID.count(tmp[2]) == requiredID.count(tmp[2]) and - x.ID.count(tmp[3]) == requiredID.count(tmp[3])] + material = [x for x in G.materials if x.ID == requiredID] if material: G.ID[componentID, i, j, k] = material[0].numID From a2475fdee37dd7471e1cc4c37ec99cf3e9742fb7 Mon Sep 17 00:00:00 2001 From: nmannall Date: Thu, 10 Oct 2024 17:34:49 +0100 Subject: [PATCH 03/31] Make the Material class sortable by ID --- gprMax/materials.py | 54 ++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 49 insertions(+), 5 deletions(-) diff --git a/gprMax/materials.py b/gprMax/materials.py index 5de9c7c6..5c9cc06a 100644 --- a/gprMax/materials.py +++ b/gprMax/materials.py @@ -49,6 +49,28 @@ class Material: self.mr = 1.0 self.sm = 0.0 + def __eq__(self, value: object) -> bool: + if isinstance(value, Material): + return self.ID == value.ID + else: + return super().__eq__(value) + + def __lt__(self, value: object) -> bool: + if isinstance(value, Material): + return self.ID < value.ID + else: + raise TypeError( + f"'<' not supported between instances of 'Material' and '{type(value)}'" + ) + + def __gt__(self, value: object) -> bool: + if isinstance(value, Material): + return self.ID > value.ID + else: + raise TypeError( + f"'>' not supported between instances of 'Material' and '{type(value)}'" + ) + def calculate_update_coeffsH(self, G): """Calculates the magnetic update coefficients of the material. @@ -239,7 +261,9 @@ class PeplinskiSoil: by Peplinski (http://dx.doi.org/10.1109/36.387598). """ - def __init__(self, ID, sandfraction, clayfraction, bulkdensity, sandpartdensity, watervolfraction): + def __init__( + self, ID, sandfraction, clayfraction, bulkdensity, sandpartdensity, watervolfraction + ): """ Args: ID: string for name of the soil. 
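Note on the comparison methods added to Material above: they are what
later allow NumPy to sort and deduplicate arrays of Material objects,
since np.unique sorts its input and so needs ordering (__lt__) as well
as equality (__eq__). A standalone sketch of the behaviour this
enables, using a simplified stand-in class rather than the real
Material:

    import numpy as np

    class M:
        # Minimal stand-in for Material: compared by ID only
        def __init__(self, ID):
            self.ID = ID

        def __eq__(self, other):
            return isinstance(other, M) and self.ID == other.ID

        def __lt__(self, other):
            return self.ID < other.ID

        def __repr__(self):
            return self.ID

    mats = np.array([M("sand"), M("air"), M("sand")], dtype=object)
    print(np.unique(mats))  # [air sand]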
@@ -304,7 +328,12 @@ class PeplinskiSoil:
         muiter = np.nditer(mumaterials, flags=["c_index"])
         while not muiter.finished:
             # Real part for frequencies in the range 1.4GHz to 18GHz
-            er = (1 + (self.rb / self.rs) * ((es**a) - 1) + (muiter[0] ** b1 * erealw**a) - muiter[0]) ** (1 / a)
+            er = (
+                1
+                + (self.rb / self.rs) * ((es**a) - 1)
+                + (muiter[0] ** b1 * erealw**a)
+                - muiter[0]
+            ) ** (1 / a)
             # Real part for frequencies in the range 0.3GHz to 1.3GHz (linear
             # correction to 1.4-18GHz value)
             er = 1.15 * er - 0.68
@@ -499,7 +528,10 @@ class PeplinskiSoil:
     delta = 25 - T
 
     beta = (
-        2.033e-2 + 1.266e-4 * delta + 2.464e-6 * delta**2 - S * (1.849e-5 - 2.551e-7 * delta + 2.551e-8 * delta**2)
+        2.033e-2
+        + 1.266e-4 * delta
+        + 2.464e-6 * delta**2
+        - S * (1.849e-5 - 2.551e-7 * delta + 2.551e-8 * delta**2)
     )
     sig_25s = S * (0.182521 - 1.46192e-3 * S + 2.09324e-5 * S**2 - 1.28205e-7 * S**3)
     sig = sig_25s * np.exp(-delta * beta)
@@ -608,8 +640,20 @@ def process_materials(G):
             material.calculate_update_coeffsH(G)
 
         # Add update coefficients to overall storage for all materials
-        G.updatecoeffsE[material.numID, :] = material.CA, material.CBx, material.CBy, material.CBz, material.srce
-        G.updatecoeffsH[material.numID, :] = material.DA, material.DBx, material.DBy, material.DBz, material.srcm
+        G.updatecoeffsE[material.numID, :] = (
+            material.CA,
+            material.CBx,
+            material.CBy,
+            material.CBz,
+            material.srce,
+        )
+        G.updatecoeffsH[material.numID, :] = (
+            material.DA,
+            material.DBx,
+            material.DBy,
+            material.DBz,
+            material.srcm,
+        )
 
         # Add update coefficients to overall storage for dispersive materials
         if hasattr(material, "poles"):

From e338167c8df4c09c919d1673ea039481c868946a Mon Sep 17 00:00:00 2001
From: nmannall
Date: Thu, 10 Oct 2024 17:41:38 +0100
Subject: [PATCH 04/31] Add helper methods to MPIGrid for updating bounds

---
 gprMax/grid/mpi_grid.py | 73 +++++++++++++++++++++++++++++++++++++++--
 1 file changed, 70 insertions(+), 3 deletions(-)

diff --git a/gprMax/grid/mpi_grid.py b/gprMax/grid/mpi_grid.py
index d08d0e76..1fc5cc26 100644
--- a/gprMax/grid/mpi_grid.py
+++ b/gprMax/grid/mpi_grid.py
@@ -195,7 +195,7 @@ class MPIGrid(FDTDGrid):
             global_coord: Global grid coordinate.
 
         Returns:
-            local_coord: Local grid coordinate
+            local_coord: Local grid coordinate.
 
         """
         return global_coord - self.lower_extent
@@ -203,13 +203,80 @@ class MPIGrid(FDTDGrid):
         """Convert a local grid coordinate to a global grid coordinate.
 
         Args:
-            local_coord: Local grid coordinate
+            local_coord: Local grid coordinate.
 
         Returns:
-            global_coord: Global grid coordinate
+            global_coord: Global grid coordinate.
 
         """
         return local_coord + self.lower_extent
 
+    def global_coord_inside_grid(
+        self, global_coord: npt.NDArray[np.intc], allow_inside_halo: bool = False
+    ) -> bool:
+        """Check if a global coordinate falls within the local grid.
+
+        Args:
+            global_coord: Global grid coordinate.
+            allow_inside_halo: If True, the function returns True when
+                the coordinate is inside the grid halo. Otherwise, it
+                will return False when the coordinate is inside the grid
+                halo. (defaults to False)
+
+        Returns:
+            is_inside_grid: True if the global coordinate falls inside
+                the local grid bounds.
+ """ + if allow_inside_halo: + lower_bound = self.lower_extent + upper_bound = self.upper_extent + 1 + else: + lower_bound = self.lower_extent + self.negative_halo_offset + upper_bound = self.upper_extent + + return all(global_coord >= lower_bound) and all(global_coord <= upper_bound) + + def global_bounds_overlap_local_grid( + self, start: npt.NDArray[np.intc], stop: npt.NDArray[np.intc] + ) -> bool: + local_start = self.global_to_local_coordinate(start) + local_stop = self.global_to_local_coordinate(stop) + return all(local_start < self.size) and all(local_stop > self.negative_halo_offset) + + def limit_global_bounds_to_within_local_grid( + self, + start: npt.NDArray[np.intc], + stop: npt.NDArray[np.intc], + step: npt.NDArray[np.intc] = np.ones(3, dtype=np.intc), + ) -> Tuple[npt.NDArray[np.intc], npt.NDArray[np.intc], npt.NDArray[np.intc]]: + local_start = self.global_to_local_coordinate(start) + + # Bring start into the local grid (and not in the negative halo) + # local_start must still be aligned with the provided step. + local_start = np.where( + local_start < self.negative_halo_offset, + self.negative_halo_offset + ((local_start - self.negative_halo_offset) % step), + local_start, + ) + + local_stop = self.global_to_local_coordinate(stop) + + # Limit local_stop such that it is at most one step beyond the + # max index of the grid. As local_stop is the upper bound, it is + # exclusive, meaning when used to slice an array (with the + # provided step), the last element accessed will one step below + # local_stop. + # Note: using self.size as an index in any dimension would fall + # in the positive halo (this counts as outside the local grid). + local_stop = np.where( + local_stop > self.size, + self.size + ((local_stop - self.size) % step), + local_stop, + ) + + offset = self.local_to_global_coordinate(local_start) - start + + return local_start, local_stop, offset + def scatter_coord_objects(self, objects: List[CoordType]) -> List[CoordType]: """Scatter coord objects to the correct MPI rank. From e7f5f6260f97d5c4b55365670d21aef673efb97e Mon Sep 17 00:00:00 2001 From: nmannall Date: Thu, 10 Oct 2024 17:42:58 +0100 Subject: [PATCH 05/31] Make comment explaining the extra step clearer --- gprMax/snapshots.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/gprMax/snapshots.py b/gprMax/snapshots.py index 210bd36d..909f621b 100644 --- a/gprMax/snapshots.py +++ b/gprMax/snapshots.py @@ -375,7 +375,7 @@ class MPISnapshot(Snapshot): self.size = np.ceil((self.stop - self.start) / self.step).astype(np.intc) return super().initialise_snapfields() - def has_neighbour(self, dimension: Dim, direction: Dir): + def has_neighbour(self, dimension: Dim, direction: Dir) -> bool: return self.neighbours[dimension][direction] != -1 def store(self, G): @@ -393,13 +393,13 @@ class MPISnapshot(Snapshot): self.neighbours[Dim.Y] = self.comm.Shift(direction=Dim.Y, disp=1) self.neighbours[Dim.Z] = self.comm.Shift(direction=Dim.Z, disp=1) - # If we have a positive neighbour, an extra step may land in our - # local halo. The numbers here will be correct (except the top - # corner), but using it would require extra checks below. + # If we do not have a positive neighbour, add an extra step to + # make the upper bound inclusive. Otherwise the additional step + # will be provided by the received halo. 
slice_stop = np.where( - self.neighbours[:, Dir.POS] != -1, - self.stop, + self.neighbours[:, Dir.POS] == -1, self.stop + self.step, + self.stop, ) self.slice = list(map(slice, self.start, slice_stop, self.step)) From 4355586328f4e9630e424fa3f98f29160811577d Mon Sep 17 00:00:00 2001 From: nmannall Date: Thu, 10 Oct 2024 17:44:45 +0100 Subject: [PATCH 06/31] Add initial MPIGeometryObject implementation --- gprMax/cmds_multiuse.py | 8 ++- gprMax/geometry_outputs.py | 138 +++++++++++++++++++++++++++++++++++++ gprMax/mpi_model.py | 32 +++++++-- 3 files changed, 173 insertions(+), 5 deletions(-) diff --git a/gprMax/cmds_multiuse.py b/gprMax/cmds_multiuse.py index a4101984..55cc64fb 100644 --- a/gprMax/cmds_multiuse.py +++ b/gprMax/cmds_multiuse.py @@ -37,6 +37,7 @@ from .cmds_geometry.cmds_geometry import ( rotate_polarisation, ) from .geometry_outputs import GeometryObjects as GeometryObjectsUser +from .geometry_outputs import MPIGeometryObjects as MPIGeometryObjectsUser from .materials import DispersiveMaterial as DispersiveMaterialUser from .materials import ListMaterial as ListMaterialUser from .materials import Material as MaterialUser @@ -1996,7 +1997,12 @@ class GeometryObjectsWrite(UserObjectMulti): x0, y0, z0 = p1 x1, y1, z1 = p2 - g = GeometryObjectsUser(x0, y0, z0, x1, y1, z1, basefilename) + if isinstance(grid, MPIGrid): + geometry_object_type = MPIGeometryObjectsUser + else: + geometry_object_type = GeometryObjectsUser + + g = geometry_object_type(x0, y0, z0, x1, y1, z1, basefilename) logger.info( f"Geometry objects in the volume from {p1[0] * grid.dx:g}m, " diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index 56c22c01..d542ee05 100644 --- a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -19,16 +19,20 @@ import json import logging import sys +from itertools import chain from pathlib import Path import h5py import numpy as np from evtk.hl import imageToVTK, linesToVTK from evtk.vtk import VtkGroup, VtkImageData, VtkUnstructuredGrid +from mpi4py import MPI from tqdm import tqdm import gprMax.config as config from gprMax.grid.fdtd_grid import FDTDGrid +from gprMax.grid.mpi_grid import MPIGrid +from gprMax.materials import Material from ._version import __version__ from .cython.geometry_outputs import write_lines @@ -471,3 +475,137 @@ class GeometryObjects: ) dispersionstr += material.ID fmaterials.write(dispersionstr + "\n") + + +class MPIGeometryObjects(GeometryObjects): + # def __init__(self, start, stop, global_size, comm): + # super().__init__(...) + + def write_hdf5(self, title: str, G: MPIGrid, pbar: tqdm): + """Writes a geometry objects file in HDF5 format. + + Args: + G: FDTDGrid class describing a grid in a model. + pbar: Progress bar class instance. 
+ """ + + # Get neighbours + self.neighbours = np.full((3, 2), -1, dtype=int) + self.neighbours[0] = self.comm.Shift(direction=0, disp=1) + self.neighbours[1] = self.comm.Shift(direction=1, disp=1) + self.neighbours[2] = self.comm.Shift(direction=2, disp=1) + + # Make subsection of array one larger if no positive neighbour + slice_stop = np.where( + self.neighbours[:, 1] == -1, + self.stop + 1, + self.stop, + ) + array_slice = list(map(slice, self.start, slice_stop)) + + ID = G.ID[:, array_slice[0], array_slice[1], array_slice[2]] + + # Get materials present in subset of ID + local_material_num_ids, inverse_map = np.unique(ID, return_inverse=True) + get_material = np.vectorize(lambda id: G.materials[id]) + local_materials = get_material(local_material_num_ids) + + # Send all materials to the coordinating rank + materials = self.comm.gather(local_materials, root=0) + + if self.comm.rank == 0: + # Filter out duplicate materials + materials = np.fromiter(chain.from_iterable(materials), dtype=Material) + _, indices = np.unique(materials, return_index=True) + + # We want to sort the materials by the order they appear, + # not sorted alphabetically by ID (default behaviour using + # np.unique). This ensures user defined materials are lower + # indexed than compound materials. + global_materials = materials[sorted(indices)] + + global_material_ids = [m.ID for m in global_materials] + else: + global_material_ids = None + + global_material_ids = self.comm.bcast(global_material_ids, root=0) + + # Create map from material ID (str) to global material numID + materials_map = { + material_id: index for index, material_id in enumerate(global_material_ids) + } + + # Remap ID array to the global material numID + ID = np.array([materials_map[m.ID] for m in local_materials])[inverse_map].reshape(ID.shape) + + # Other geometry arrays do not have halos, so adjustment to + # 'stop' is not necessary + array_slice = list(map(slice, self.start, self.stop)) + + data = G.solid[array_slice[0], array_slice[1], array_slice[2]] + map_local_materials = np.vectorize(lambda id: materials_map[G.materials[id].ID]) + data = map_local_materials(data) + + rigidE = G.rigidE[:, array_slice[0], array_slice[1], array_slice[2]] + rigidH = G.rigidH[:, array_slice[0], array_slice[1], array_slice[2]] + + start = self.offset + stop = start + self.size + + with h5py.File(self.filename_hdf5, "w", driver="mpio", comm=self.comm) as fdata: + fdata.attrs["gprMax"] = __version__ + fdata.attrs["Title"] = title + fdata.attrs["dx_dy_dz"] = (G.dx, G.dy, G.dz) + + dset = fdata.create_dataset("/data", self.global_size) + dset[start[0] : stop[0], start[1] : stop[1], start[2] : stop[2]] = data + pbar.update(self.solidsize) + + rigid_E_dataset = fdata.create_dataset("/rigidE", (12, *self.global_size)) + rigid_E_dataset[:, start[0] : stop[0], start[1] : stop[1], start[2] : stop[2]] = rigidE + + rigid_H_dataset = fdata.create_dataset("/rigidH", (6, *self.global_size)) + rigid_H_dataset[:, start[0] : stop[0], start[1] : stop[1], start[2] : stop[2]] = rigidH + pbar.update(self.rigidsize) + + stop = np.where( + self.neighbours[:, 1] == -1, + stop + 1, + stop, + ) + + dset = fdata.create_dataset("/ID", (6, *(self.global_size + 1))) + dset[:, start[0] : stop[0], start[1] : stop[1], start[2] : stop[2]] = ID + pbar.update(self.IDsize) + + # Write materials list to a text file + if self.comm.rank == 0: + with open(self.filename_materials, "w") as fmaterials: + for material in global_materials: + fmaterials.write( + f"#material: {material.er:g} {material.se:g} " 
+ f"{material.mr:g} {material.sm:g} {material.ID}\n" + ) + if hasattr(material, "poles"): + if "debye" in material.type: + dispersionstr = "#add_dispersion_debye: " f"{material.poles:g} " + for pole in range(material.poles): + dispersionstr += ( + f"{material.deltaer[pole]:g} " f"{material.tau[pole]:g} " + ) + elif "lorenz" in material.type: + dispersionstr = f"#add_dispersion_lorenz: " f"{material.poles:g} " + for pole in range(material.poles): + dispersionstr += ( + f"{material.deltaer[pole]:g} " + f"{material.tau[pole]:g} " + f"{material.alpha[pole]:g} " + ) + elif "drude" in material.type: + dispersionstr = f"#add_dispersion_drude: " f"{material.poles:g} " + for pole in range(material.poles): + dispersionstr += ( + f"{material.tau[pole]:g} " f"{material.alpha[pole]:g} " + ) + dispersionstr += material.ID + fmaterials.write(dispersionstr + "\n") diff --git a/gprMax/mpi_model.py b/gprMax/mpi_model.py index eeb2bc8c..0c7d03d4 100644 --- a/gprMax/mpi_model.py +++ b/gprMax/mpi_model.py @@ -1,6 +1,7 @@ import logging from typing import Optional +import numpy as np from mpi4py import MPI from gprMax import config @@ -55,7 +56,9 @@ class MPIModel(Model): def build_geometry(self): self._broadcast_model() - return super().build_geometry() + super().build_geometry() + + self._filter_geometry_objects() def _broadcast_model(self): self.nx = self.comm.bcast(self.nx) @@ -74,6 +77,30 @@ class MPIModel(Model): model_config.materials["maxpoles"] = self.comm.bcast(model_config.materials["maxpoles"]) model_config.ompthreads = self.comm.bcast(model_config.ompthreads) + def _filter_geometry_objects(self): + objects = self.comm.bcast(self.geometryobjects) + self.geometryobjects = [] + + for go in objects: + start = np.array([go.xs, go.ys, go.zs], dtype=np.intc) + stop = np.array([go.xf, go.yf, go.zf], dtype=np.intc) + if self.G.global_bounds_overlap_local_grid(start, stop): + comm = self.comm.Split() + assert isinstance(comm, MPI.Intracomm) + start_grid_coord = self.G.get_grid_coord_from_coordinate(start) + stop_grid_coord = self.G.get_grid_coord_from_coordinate(stop) + 1 + go.comm = comm.Create_cart((stop_grid_coord - start_grid_coord).tolist()) + + go.global_size = np.array([go.nx, go.ny, go.nz], dtype=np.intc) + start, stop, offset = self.G.limit_global_bounds_to_within_local_grid(start, stop) + go.size = stop - start + go.start = start + go.stop = stop + go.offset = offset + self.geometryobjects.append(go) + else: + self.comm.Split(MPI.UNDEFINED) + def write_output_data(self): """Writes output data, i.e. field data for receivers and snapshots to file(s). 
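The Split calls in _filter_geometry_objects above rely on
MPI_Comm_split being collective over self.comm: every rank must make
the call once per geometry object, which is why ranks whose subdomain
does not overlap an object still call Split, but with MPI.UNDEFINED so
they receive MPI.COMM_NULL rather than joining the new communicator. A
standalone sketch of the pattern in mpi4py (the overlap test here is a
stand-in for the real bounds check):

    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    overlaps = comm.rank % 2 == 0  # stand-in for the real overlap test

    if overlaps:
        sub = comm.Split()  # default colour 0: ranks that share the object
    else:
        sub = comm.Split(MPI.UNDEFINED)  # collective call; yields MPI.COMM_NULL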
@@ -89,9 +116,6 @@
         self.G.size = self.G.global_size
         write_hdf5_outputfile(config.get_model_config().output_file_path_ext, self.title, self)
 
-    def _output_geometry(self):
-        logger.warn("Geometry views and geometry objects are not currently supported with MPI\n")
-
     def _create_grid(self) -> MPIGrid:
         cart_comm = MPI.COMM_WORLD.Create_cart(config.sim_config.mpi)
         return MPIGrid(cart_comm)

From 1572a8e8239249ac787006526ddb5cbae0311f0f Mon Sep 17 00:00:00 2001
From: nmannall
Date: Thu, 10 Oct 2024 17:49:54 +0100
Subject: [PATCH 07/31] Refactor writing a material to file to a separate
 method

---
 gprMax/geometry_outputs.py | 84 +++++++++++++------------------------
 1 file changed, 29 insertions(+), 55 deletions(-)

diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py
index d542ee05..e8733326 100644
--- a/gprMax/geometry_outputs.py
+++ b/gprMax/geometry_outputs.py
@@ -19,6 +19,7 @@
 import json
 import logging
 import sys
+from io import TextIOWrapper
 from itertools import chain
 from pathlib import Path
 
@@ -448,33 +449,32 @@ class GeometryObjects:
             for numID in range(minmat, maxmat + 1):
                 for material in G.materials:
                     if material.numID == numID:
-                        fmaterials.write(
-                            f"#material: {material.er:g} {material.se:g} "
-                            f"{material.mr:g} {material.sm:g} {material.ID}\n"
-                        )
-                        if hasattr(material, "poles"):
-                            if "debye" in material.type:
-                                dispersionstr = "#add_dispersion_debye: " f"{material.poles:g} "
-                                for pole in range(material.poles):
-                                    dispersionstr += (
-                                        f"{material.deltaer[pole]:g} " f"{material.tau[pole]:g} "
-                                    )
-                            elif "lorenz" in material.type:
-                                dispersionstr = f"#add_dispersion_lorenz: " f"{material.poles:g} "
-                                for pole in range(material.poles):
-                                    dispersionstr += (
-                                        f"{material.deltaer[pole]:g} "
-                                        f"{material.tau[pole]:g} "
-                                        f"{material.alpha[pole]:g} "
-                                    )
-                            elif "drude" in material.type:
-                                dispersionstr = f"#add_dispersion_drude: " f"{material.poles:g} "
-                                for pole in range(material.poles):
-                                    dispersionstr += (
-                                        f"{material.tau[pole]:g} " f"{material.alpha[pole]:g} "
-                                    )
-                            dispersionstr += material.ID
-                            fmaterials.write(dispersionstr + "\n")
+                        self.output_material(material, fmaterials)
+
+    def output_material(self, material: Material, file: TextIOWrapper):
+        file.write(
+            f"#material: {material.er:g} {material.se:g} "
+            f"{material.mr:g} {material.sm:g} {material.ID}\n"
+        )
+        if hasattr(material, "poles"):
+            if "debye" in material.type:
+                dispersionstr = "#add_dispersion_debye: " f"{material.poles:g} "
+                for pole in range(material.poles):
+                    dispersionstr += f"{material.deltaer[pole]:g} " f"{material.tau[pole]:g} "
+            elif "lorenz" in material.type:
+                dispersionstr = f"#add_dispersion_lorenz: " f"{material.poles:g} "
+                for pole in range(material.poles):
+                    dispersionstr += (
+                        f"{material.deltaer[pole]:g} "
+                        f"{material.tau[pole]:g} "
+                        f"{material.alpha[pole]:g} "
+                    )
+            elif "drude" in material.type:
+                dispersionstr = f"#add_dispersion_drude: " f"{material.poles:g} "
+                for pole in range(material.poles):
+                    dispersionstr += f"{material.tau[pole]:g} " f"{material.alpha[pole]:g} "
+            dispersionstr += material.ID
+            file.write(dispersionstr + "\n")
 
 
 class MPIGeometryObjects(GeometryObjects):
@@ -580,32 +580,6 @@ class MPIGeometryObjects(GeometryObjects):
         # Write materials list to a text file
         if self.comm.rank == 0:
-            with open(self.filename_materials, "w") as fmaterials:
+            with open(self.filename_materials, "w") as materials_file:
                 for material in global_materials:
-                    fmaterials.write(
-                        f"#material: {material.er:g} {material.se:g} "
-                        f"{material.mr:g} 
{material.sm:g} {material.ID}\n" - ) - if hasattr(material, "poles"): - if "debye" in material.type: - dispersionstr = "#add_dispersion_debye: " f"{material.poles:g} " - for pole in range(material.poles): - dispersionstr += ( - f"{material.deltaer[pole]:g} " f"{material.tau[pole]:g} " - ) - elif "lorenz" in material.type: - dispersionstr = f"#add_dispersion_lorenz: " f"{material.poles:g} " - for pole in range(material.poles): - dispersionstr += ( - f"{material.deltaer[pole]:g} " - f"{material.tau[pole]:g} " - f"{material.alpha[pole]:g} " - ) - elif "drude" in material.type: - dispersionstr = f"#add_dispersion_drude: " f"{material.poles:g} " - for pole in range(material.poles): - dispersionstr += ( - f"{material.tau[pole]:g} " f"{material.alpha[pole]:g} " - ) - dispersionstr += material.ID - fmaterials.write(dispersionstr + "\n") + self.output_material(material, materials_file) From 24157076a4db440091ae6adc29735cc184951084 Mon Sep 17 00:00:00 2001 From: nmannall Date: Fri, 11 Oct 2024 14:17:29 +0100 Subject: [PATCH 08/31] Specify dtype of h5 datasets when they are created --- gprMax/geometry_outputs.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index e8733326..abddb07a 100644 --- a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -557,14 +557,18 @@ class MPIGeometryObjects(GeometryObjects): fdata.attrs["Title"] = title fdata.attrs["dx_dy_dz"] = (G.dx, G.dy, G.dz) - dset = fdata.create_dataset("/data", self.global_size) + dset = fdata.create_dataset("/data", self.global_size, dtype=data.dtype) dset[start[0] : stop[0], start[1] : stop[1], start[2] : stop[2]] = data pbar.update(self.solidsize) - rigid_E_dataset = fdata.create_dataset("/rigidE", (12, *self.global_size)) + rigid_E_dataset = fdata.create_dataset( + "/rigidE", (12, *self.global_size), dtype=rigidE.dtype + ) rigid_E_dataset[:, start[0] : stop[0], start[1] : stop[1], start[2] : stop[2]] = rigidE - rigid_H_dataset = fdata.create_dataset("/rigidH", (6, *self.global_size)) + rigid_H_dataset = fdata.create_dataset( + "/rigidH", (6, *self.global_size), dtype=rigidH.dtype + ) rigid_H_dataset[:, start[0] : stop[0], start[1] : stop[1], start[2] : stop[2]] = rigidH pbar.update(self.rigidsize) @@ -574,7 +578,7 @@ class MPIGeometryObjects(GeometryObjects): stop, ) - dset = fdata.create_dataset("/ID", (6, *(self.global_size + 1))) + dset = fdata.create_dataset("/ID", (6, *(self.global_size + 1)), dtype=ID.dtype) dset[:, start[0] : stop[0], start[1] : stop[1], start[2] : stop[2]] = ID pbar.update(self.IDsize) From 272fc5d1ffe9b10b2c58d2cd10738e758b11365c Mon Sep 17 00:00:00 2001 From: nmannall Date: Mon, 14 Oct 2024 10:28:32 +0100 Subject: [PATCH 09/31] Remove extra materials output by serial geometry objects --- gprMax/geometry_outputs.py | 56 +++++++++++++++++++------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index abddb07a..fe7b129d 100644 --- a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -411,45 +411,45 @@ class GeometryObjects: pbar: Progress bar class instance. 
""" + ID = G.ID[:, self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1] + + # Get materials present in subset of ID array + material_ids, inverse_map = np.unique(ID, return_inverse=True) + + # Create map from material ID to 0 - number of materials + materials_map = {material_id: index for index, material_id in enumerate(material_ids)} + + # Remap ID array to the reduced list of materials + ID = np.array([materials_map[id] for id in material_ids])[inverse_map].reshape(ID.shape) + + data = G.solid[self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1].astype( + "int16" + ) + map_materials = np.vectorize(lambda id: materials_map[id]) + data = map_materials(data) + + rigidE = G.rigidE[:, self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1] + rigidH = G.rigidH[:, self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1] + with h5py.File(self.filename_hdf5, "w") as fdata: fdata.attrs["gprMax"] = __version__ fdata.attrs["Title"] = title fdata.attrs["dx_dy_dz"] = (G.dx, G.dy, G.dz) - # Get minimum and maximum integers of materials in geometry objects volume - minmat = np.amin( - G.ID[:, self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1] - ) - maxmat = np.amax( - G.ID[:, self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1] - ) - fdata["/data"] = ( - G.solid[self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1].astype( - "int16" - ) - - minmat - ) + fdata["/data"] = data pbar.update(self.solidsize) - fdata["/rigidE"] = G.rigidE[ - :, self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1 - ] - fdata["/rigidH"] = G.rigidH[ - :, self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1 - ] + + fdata["/rigidE"] = rigidE + fdata["/rigidH"] = rigidH pbar.update(self.rigidsize) - fdata["/ID"] = ( - G.ID[:, self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1] - - minmat - ) + + fdata["/ID"] = ID pbar.update(self.IDsize) # Write materials list to a text file - # This includes all materials in range whether used in volume or not with open(self.filename_materials, "w") as fmaterials: - for numID in range(minmat, maxmat + 1): - for material in G.materials: - if material.numID == numID: - self.output_material(material, fmaterials) + for numID in material_ids: + self.output_material(G.materials[numID], fmaterials) def output_material(self, material: Material, file: TextIOWrapper): file.write( From 35133525a74c81304664c73c8ecc6103f5c60316 Mon Sep 17 00:00:00 2001 From: nmannall Date: Mon, 14 Oct 2024 10:32:11 +0100 Subject: [PATCH 10/31] Correct array slice boundaries for serial geometry objects --- gprMax/geometry_outputs.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index fe7b129d..1068a88e 100644 --- a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -422,14 +422,12 @@ class GeometryObjects: # Remap ID array to the reduced list of materials ID = np.array([materials_map[id] for id in material_ids])[inverse_map].reshape(ID.shape) - data = G.solid[self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1].astype( - "int16" - ) + data = G.solid[self.xs : self.xf, self.ys : self.yf, self.zs : self.zf].astype("int16") map_materials = np.vectorize(lambda id: materials_map[id]) data = map_materials(data) - rigidE = G.rigidE[:, self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1] - rigidH = G.rigidH[:, self.xs : self.xf + 1, self.ys : self.yf + 1, 
self.zs : self.zf + 1]
+        rigidE = G.rigidE[:, self.xs : self.xf, self.ys : self.yf, self.zs : self.zf]
+        rigidH = G.rigidH[:, self.xs : self.xf, self.ys : self.yf, self.zs : self.zf]

From 3e575d9c4d728b14b38b5576f05a43a92bd8ece6 Mon Sep 17 00:00:00 2001
From: nmannall
Date: Fri, 1 Nov 2024 15:55:36 +0000
Subject: [PATCH 11/31] Use np.single dtype rather than float

---
 gprMax/grid/fdtd_grid.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/gprMax/grid/fdtd_grid.py b/gprMax/grid/fdtd_grid.py
index bb712eb1..021e54a0 100644
--- a/gprMax/grid/fdtd_grid.py
+++ b/gprMax/grid/fdtd_grid.py
@@ -58,7 +58,7 @@ class FDTDGrid:
         self.nx = 0
         self.ny = 0
         self.nz = 0
-        self.dl = np.ones(3, dtype=float)
+        self.dl = np.ones(3, dtype=np.single)
         self.dt = 0.0
 
         # Field Arrays

From 2eb54832d724dae2c3c62cf68350f98fded6fd64 Mon Sep 17 00:00:00 2001
From: nmannall
Date: Fri, 1 Nov 2024 16:02:27 +0000
Subject: [PATCH 12/31] Create class to write VTKHDF Image Data files

---
 gprMax/vtkhdf.py | 304 +++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 304 insertions(+)
 create mode 100644 gprMax/vtkhdf.py

diff --git a/gprMax/vtkhdf.py b/gprMax/vtkhdf.py
new file mode 100644
index 00000000..f9403de9
--- /dev/null
+++ b/gprMax/vtkhdf.py
@@ -0,0 +1,304 @@
+import logging
+from abc import abstractmethod
+from contextlib import AbstractContextManager
+from os import PathLike
+from pathlib import Path
+from types import TracebackType
+from typing import Literal, Optional, Union
+
+import h5py
+import numpy as np
+import numpy.typing as npt
+from mpi4py import MPI
+
+logger = logging.getLogger(__name__)
+
+
+class VtkHdfFile(AbstractContextManager):
+    VERSION = [2, 2]
+    FILE_EXTENSION = ".vtkhdf"
+    ROOT_GROUP = "VTKHDF"
+
+    @property
+    @abstractmethod
+    def TYPE(self) -> str:
+        pass
+
+    def __init__(self, filename: Union[str, PathLike], comm: Optional[MPI.Comm] = None) -> None:
+        """Create a new VtkHdfFile.
+
+        If the file already exists, it will be overwritten. Required
+        attributes (Type and Version) will be written to the file.
+
+        The file will be opened using the 'mpio' h5py driver if an MPI
+        communicator is provided.
+
+        Args:
+            filename: Name of the file (can be a file path). The file
+                extension will be set to '.vtkhdf'.
+            comm (optional): MPI communicator containing all ranks that
+                want to write to the file.
+
+        """
+        # Ensure the filename uses the correct extension
+        self.filename = Path(filename)
+        if self.filename.suffix != "" and self.filename.suffix != self.FILE_EXTENSION:
+            logger.warning(
+                f"Invalid file extension '{self.filename.suffix}' for VTKHDF file. Changing to '{self.FILE_EXTENSION}'."
+ ) + + self.filename = self.filename.with_suffix(self.FILE_EXTENSION) + + self.comm = comm + + # Check if the filehandler should use an MPI driver + if self.comm is None: + self.file_handler = h5py.File(self.filename, "w") + else: + self.file_handler = h5py.File(self.filename, "w", driver="mpio", comm=self.comm) + + self.root_group = self.file_handler.create_group(self.ROOT_GROUP) + + # Set required Version and Type root attributes + self._set_root_attribute("Version", self.VERSION) + + type_as_ascii = self.TYPE.encode("ascii") + self._set_root_attribute( + "Type", type_as_ascii, h5py.string_dtype("ascii", len(type_as_ascii)) + ) + + def __exit__( + self, + exc_type: Optional[type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> Optional[bool]: + """Close the file when the context is exited. + + The parameters describe the exception that caused the context to + be exited. If the context was exited without an exception, all + three arguments will be None. Any exception will be + processed normally upon exit from this method. + + Returns: + suppress_exception (optional): Returns True if the exception + should be suppressed (i.e. not propagated). Otherwise, + the exception will be processed normally upon exit from + this method. + """ + self.close() + + def close(self) -> None: + """Close the file handler""" + self.file_handler.close() + + def _get_root_attribute(self, attribute: str) -> npt.NDArray: + """Get attribute from the root VTKHDF group if it exists. + + Args: + attribute: Name of the attribute. + + Raises: + KeyError: Raised if the attribute is not present as a key. + """ + value = self.root_group.attrs[attribute] + if isinstance(value, h5py.Empty): + raise KeyError(f"Attribute '{attribute}' not present in /{self.ROOT_GROUP} group") + return value + + def _set_root_attribute( + self, attribute: str, value: npt.ArrayLike, dtype: npt.DTypeLike = None + ): + """Set attribute in the root VTKHDF group. + + Args: + attribute: Name of the new attribute. + value: An array to initialize the attribute. + dtype (optional): Data type of the attribute. Overrides + value.dtype if both are given. + """ + self.root_group.attrs.create(attribute, value, dtype=dtype) + + def _build_dataset_path(self, *path: str) -> str: + """Build an HDF5 dataset path attached to the root VTKHDF group. + + Args: + *path: Components of the required path. + """ + return "/".join([self.ROOT_GROUP, *path]) + + def _write_dataset( + self, + path: str, + data: npt.NDArray, + shape: Optional[npt.NDArray[np.intc]] = None, + offset: Optional[npt.NDArray[np.intc]] = None, + ): + """Write the specified dataset to the file. + + Args: + path: Absolute path to the dataset. + data: Data to initialize the dataset. + shape (optional): Size of the full dataset being created. + Can be omitted if data provides the full dataset. + offset (optional): Offset to store the provided data at. Can + be omitted if data provides the full dataset. + """ + + # VTKHDF stores datasets using ZYX ordering rather than XYZ + data = data.transpose() + + if shape is not None: + shape = np.flip(shape) + + if offset is not None: + offset = np.flip(offset) + + if shape is None or all(shape == data.shape): + self.file_handler.create_dataset(path, data=data) + else: + dimensions = len(data.shape) + if offset is None: + offset = np.zeros(dimensions, dtype=np.intc) + + if dimensions != len(shape): + raise ValueError( + "The data and specified shape must have the same number of dimensions." 
+ f" {dimensions} != {len(shape)}" + ) + + if dimensions != len(offset): + raise ValueError( + "The data and specified offset must have the same number of dimensions." + f" {dimensions} != {len(offset)}" + ) + + if any(offset + data.shape > shape): + raise ValueError( + "The provided offset and data does not fit within the bounds of the dataset." + f" {offset} + {data.shape} = {offset + data.shape} > {shape}" + ) + + dataset = self.file_handler.create_dataset(path, shape, data.dtype) + + start = offset + stop = offset + data.shape + + dataset_slice = (slice(start[i], stop[i]) for i in range(dimensions)) + + dataset[dataset_slice] = data + + def add_point_data( + self, + name: str, + data: npt.NDArray, + shape: Optional[npt.NDArray[np.intc]] = None, + offset: Optional[npt.NDArray[np.intc]] = None, + ): + dataset_path = self._build_dataset_path("PointData", name) + self._write_dataset(dataset_path, data, shape, offset) + + def add_field_data( + self, + name: str, + data: npt.NDArray, + shape: Optional[npt.NDArray[np.intc]] = None, + offset: Optional[npt.NDArray[np.intc]] = None, + ): + dataset_path = self._build_dataset_path("FieldData", name) + self._write_dataset(dataset_path, data, shape, offset) + + def add_cell_data( + self, + name: str, + data: npt.NDArray, + shape: Optional[npt.NDArray[np.intc]] = None, + offset: Optional[npt.NDArray[np.intc]] = None, + ): + dataset_path = self._build_dataset_path("CellData", name) + self._write_dataset(dataset_path, data, shape, offset) + + +class VtkImageData(VtkHdfFile): + DIRECTION_ATTR = "Direction" + ORIGIN_ATTR = "Origin" + SPACING_ATTR = "Spacing" + WHOLE_EXTENT_ATTR = "WholeExtent" + + @property + def TYPE(self) -> Literal["ImageData"]: + return "ImageData" + + def __init__( + self, + filename: Union[str, PathLike], + shape: npt.NDArray[np.intc], + origin: Optional[npt.NDArray[np.single]] = None, + spacing: Optional[npt.NDArray[np.single]] = None, + direction: Optional[npt.NDArray[np.single]] = None, + comm: Optional[MPI.Cartcomm] = None, + ) -> None: + super().__init__(filename, comm) + + self.shape = shape + self.points_shape = shape + 1 + whole_extent = np.zeros(2 * len(self.shape), dtype=np.intc) + whole_extent[1::2] = self.shape + self._set_root_attribute(self.WHOLE_EXTENT_ATTR, whole_extent) + + if origin is None: + origin = np.zeros(len(self.shape), dtype=np.single) + self.set_origin(origin) + + if spacing is None: + spacing = np.ones(len(self.shape), dtype=np.single) + self.set_spacing(spacing) + + if direction is None: + direction = np.diag(np.ones(len(self.shape), dtype=np.single)).flatten() + self.set_direction(direction) + + @property + def whole_extent(self) -> npt.NDArray[np.intc]: + return self._get_root_attribute(self.WHOLE_EXTENT_ATTR) + + @property + def origin(self) -> npt.NDArray[np.single]: + return self._get_root_attribute(self.ORIGIN_ATTR) + + @property + def spacing(self) -> npt.NDArray[np.single]: + return self._get_root_attribute(self.SPACING_ATTR) + + @property + def direction(self) -> npt.NDArray[np.single]: + return self._get_root_attribute(self.DIRECTION_ATTR) + + def set_origin(self, origin: npt.NDArray[np.single]): + self._set_root_attribute(self.ORIGIN_ATTR, origin) + + def set_spacing(self, spacing: npt.NDArray[np.single]): + self._set_root_attribute(self.SPACING_ATTR, spacing) + + def set_direction(self, direction: npt.NDArray[np.single]): + self._set_root_attribute(self.DIRECTION_ATTR, direction) + + def add_point_data( + self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None + ): 
+ if offset is None and any(data.shape != self.points_shape): # type: ignore + raise ValueError( + f"If no offset is specified, data.shape {data.shape} must match the shape of the" + f" VtkImageData point datasets {self.points_shape}" + ) + return super().add_point_data(name, data, self.points_shape, offset) + + def add_cell_data( + self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None + ): + if offset is None and any(data.shape != self.shape): # type: ignore + raise ValueError( + f"If no offset is specified, data.shape {data.shape} must match the shape of the" + f" VtkImageData {self.shape}" + ) + return super().add_cell_data(name, data, self.shape, offset) From e8051bc72d4638a7cbd7cad98ce5aeb5168ad148 Mon Sep 17 00:00:00 2001 From: nmannall Date: Fri, 1 Nov 2024 16:07:17 +0000 Subject: [PATCH 13/31] Use numpy arrays to store geometry view extents --- gprMax/geometry_outputs.py | 81 ++++++++++++++++++++++++++++++++------ 1 file changed, 68 insertions(+), 13 deletions(-) diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index 56c22c01..7d2b507c 100644 --- a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -69,7 +69,20 @@ def save_geometry_views(gvs): class GeometryView: """Base class for Geometry Views.""" - def __init__(self, xs, ys, zs, xf, yf, zf, dx, dy, dz, filename, grid): + def __init__( + self, + xs: int, + ys: int, + zs: int, + xf: int, + yf: int, + zf: int, + dx: int, + dy: int, + dz: int, + filename: str, + grid: FDTDGrid, + ): """ Args: xs, xf, ys, yf, zs, zf: ints for extent of geometry view in cells. @@ -78,23 +91,65 @@ class GeometryView: grid: FDTDGrid class describing a grid in a model. """ - self.xs = xs - self.ys = ys - self.zs = zs - self.xf = xf - self.yf = yf - self.zf = zf - self.nx = self.xf - self.xs - self.ny = self.yf - self.ys - self.nz = self.zf - self.zs - self.dx = dx - self.dy = dy - self.dz = dz + self.start = np.array([xs, ys, zs], dtype=np.intc) + self.stop = np.array([xf, yf, zf], dtype=np.intc) + self.step = np.array([dx, dy, dz], dtype=np.intc) + self.size = self.stop - self.start + self.filename = filename self.filenamebase = filename self.grid = grid self.nbytes = None + # Properties for backwards compatibility + @property + def xs(self) -> int: + return self.start[0] + + @property + def ys(self) -> int: + return self.start[1] + + @property + def zs(self) -> int: + return self.start[2] + + @property + def xf(self) -> int: + return self.stop[0] + + @property + def yf(self) -> int: + return self.stop[1] + + @property + def zf(self) -> int: + return self.stop[2] + + @property + def dx(self) -> int: + return self.step[0] + + @property + def dy(self) -> int: + return self.step[1] + + @property + def dz(self) -> int: + return self.step[2] + + @property + def nx(self) -> int: + return self.size[0] + + @property + def ny(self) -> int: + return self.size[1] + + @property + def nz(self) -> int: + return self.size[2] + def set_filename(self): """Constructs filename from user-supplied name and model run number.""" parts = config.get_model_config().output_file_path.parts From 3bffeb64507ecfa13095254888de1d8b4cb33493 Mon Sep 17 00:00:00 2001 From: nmannall Date: Fri, 1 Nov 2024 16:10:11 +0000 Subject: [PATCH 14/31] Change GeometryViewVoxel to use VTKHDF file format --- gprMax/geometry_outputs.py | 36 +++++++++++++++++------------------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index 7d2b507c..ee13a07b 100644 --- 
a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -23,12 +23,13 @@ from pathlib import Path import h5py import numpy as np -from evtk.hl import imageToVTK, linesToVTK -from evtk.vtk import VtkGroup, VtkImageData, VtkUnstructuredGrid +from evtk.hl import linesToVTK +from evtk.vtk import VtkUnstructuredGrid from tqdm import tqdm import gprMax.config as config from gprMax.grid.fdtd_grid import FDTDGrid +from gprMax.vtkhdf import VtkImageData from ._version import __version__ from .cython.geometry_outputs import write_lines @@ -53,8 +54,7 @@ def save_geometry_views(gvs): total=gv.nbytes, unit="byte", unit_scale=True, - desc=f"Writing geometry view file {i + 1}/{len(gvs)}, " - f"{gv.filename.name}{gv.vtkfiletype.ext}", + desc=f"Writing geometry view file {i + 1}/{len(gvs)}, {gv.filename.name}.vtkhdf", ncols=get_terminal_width() - 1, file=sys.stdout, disable=not config.sim_config.general["progressbars"], @@ -255,11 +255,11 @@ class GeometryViewLines(GeometryView): class GeometryViewVoxels(GeometryView): - """Imagedata (.vti) for a per-cell geometry view.""" + """Image data for a per-cell geometry view.""" def __init__(self, *args): super().__init__(*args) - self.vtkfiletype = VtkImageData + self.vtkfiletype = ".vtkhdf" def prep_vtk(self): """Prepares data for writing to VTK file. @@ -306,22 +306,20 @@ class GeometryViewVoxels(GeometryView): """ if isinstance(self.grid, SubGridBaseGrid): - origin = ( - (self.grid.i0 * self.grid.dx * self.grid.ratio), - (self.grid.j0 * self.grid.dy * self.grid.ratio), - (self.grid.k0 * self.grid.dz * self.grid.ratio), + origin = np.array( + [ + (self.grid.i0 * self.grid.dx * self.grid.ratio), + (self.grid.j0 * self.grid.dy * self.grid.ratio), + (self.grid.k0 * self.grid.dz * self.grid.ratio), + ] ) else: - origin = ((self.xs * self.grid.dx), (self.ys * self.grid.dy), (self.zs * self.grid.dz)) + origin = self.start * self.grid.dl - # Write the VTK file .vti - imageToVTK( - str(self.filename), - origin=origin, - spacing=((self.dx * self.grid.dx), (self.dy * self.grid.dy), (self.dz * self.grid.dz)), - cellData={"Material": vtk_data["data"]}, - comments=[vtk_data["comments"]], - ) + spacing = self.step * self.grid.dl + + with VtkImageData(self.filename, self.size, origin=origin, spacing=spacing) as f: + f.add_cell_data("Material", vtk_data["data"]) class Comments: From d0cc25506ae8c43a697e2b3bc28e3ab7e001366e Mon Sep 17 00:00:00 2001 From: nmannall Date: Fri, 1 Nov 2024 18:03:40 +0000 Subject: [PATCH 15/31] Add validation to ensure vtkImageData files are 3D --- gprMax/vtkhdf.py | 42 ++++++++++++++++++++++++++++++------------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/gprMax/vtkhdf.py b/gprMax/vtkhdf.py index f9403de9..0a2e6278 100644 --- a/gprMax/vtkhdf.py +++ b/gprMax/vtkhdf.py @@ -225,6 +225,8 @@ class VtkImageData(VtkHdfFile): SPACING_ATTR = "Spacing" WHOLE_EXTENT_ATTR = "WholeExtent" + DIMENSIONS = 3 + @property def TYPE(self) -> Literal["ImageData"]: return "ImageData" @@ -236,26 +238,33 @@ class VtkImageData(VtkHdfFile): origin: Optional[npt.NDArray[np.single]] = None, spacing: Optional[npt.NDArray[np.single]] = None, direction: Optional[npt.NDArray[np.single]] = None, - comm: Optional[MPI.Cartcomm] = None, + comm: Optional[MPI.Comm] = None, ) -> None: super().__init__(filename, comm) + if len(shape) == 0: + raise ValueError(f"Shape must not be empty.") + if len(shape) > self.DIMENSIONS: + raise ValueError(f"Shape must not have more than {self.DIMENSIONS} dimensions.") + elif len(shape) < self.DIMENSIONS: + shape = 
np.concatenate((shape, np.ones(self.DIMENSIONS - len(shape), dtype=np.intc))) + self.shape = shape - self.points_shape = shape + 1 - whole_extent = np.zeros(2 * len(self.shape), dtype=np.intc) + + whole_extent = np.zeros(2 * self.DIMENSIONS, dtype=np.intc) whole_extent[1::2] = self.shape self._set_root_attribute(self.WHOLE_EXTENT_ATTR, whole_extent) if origin is None: - origin = np.zeros(len(self.shape), dtype=np.single) + origin = np.zeros(self.DIMENSIONS, dtype=np.single) self.set_origin(origin) if spacing is None: - spacing = np.ones(len(self.shape), dtype=np.single) + spacing = np.ones(self.DIMENSIONS, dtype=np.single) self.set_spacing(spacing) if direction is None: - direction = np.diag(np.ones(len(self.shape), dtype=np.single)).flatten() + direction = np.diag(np.ones(self.DIMENSIONS, dtype=np.single)).flatten() self.set_direction(direction) @property @@ -275,30 +284,39 @@ class VtkImageData(VtkHdfFile): return self._get_root_attribute(self.DIRECTION_ATTR) def set_origin(self, origin: npt.NDArray[np.single]): + if len(origin) != self.DIMENSIONS: + raise ValueError(f"Origin attribute must have {self.DIMENSIONS} dimensions.") self._set_root_attribute(self.ORIGIN_ATTR, origin) def set_spacing(self, spacing: npt.NDArray[np.single]): + if len(spacing) != self.DIMENSIONS: + raise ValueError(f"Spacing attribute must have {self.DIMENSIONS} dimensions.") self._set_root_attribute(self.SPACING_ATTR, spacing) def set_direction(self, direction: npt.NDArray[np.single]): + if len(direction) != self.DIMENSIONS * self.DIMENSIONS: + raise ValueError( + f"Direction attribute must have {self.DIMENSIONS * self.DIMENSIONS} dimensions." + ) self._set_root_attribute(self.DIRECTION_ATTR, direction) def add_point_data( self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None ): - if offset is None and any(data.shape != self.points_shape): # type: ignore + points_shape = self.shape + 1 + if offset is None and any(data.shape != points_shape): # type: ignore raise ValueError( - f"If no offset is specified, data.shape {data.shape} must match the shape of the" - f" VtkImageData point datasets {self.points_shape}" + "If no offset is specified, data.shape must be one larger in each dimension than" + f" this vtkImageData object. {data.shape} != {points_shape}" ) - return super().add_point_data(name, data, self.points_shape, offset) + return super().add_point_data(name, data, points_shape, offset) def add_cell_data( self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None ): if offset is None and any(data.shape != self.shape): # type: ignore raise ValueError( - f"If no offset is specified, data.shape {data.shape} must match the shape of the" - f" VtkImageData {self.shape}" + "If no offset is specified, data.shape must match the dimensions of this" + f" vtkImageData object. 
{data.shape} != {self.shape}" ) return super().add_cell_data(name, data, self.shape, offset) From ad50e2ff91223ca643745e7f0245a18225d49de4 Mon Sep 17 00:00:00 2001 From: nmannall Date: Fri, 8 Nov 2024 12:12:19 +0000 Subject: [PATCH 16/31] Update documentation for VtkHdfFile and VtkImageData --- gprMax/vtkhdf.py | 168 +++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 148 insertions(+), 20 deletions(-) diff --git a/gprMax/vtkhdf.py b/gprMax/vtkhdf.py index 0a2e6278..dc9f80b9 100644 --- a/gprMax/vtkhdf.py +++ b/gprMax/vtkhdf.py @@ -19,11 +19,17 @@ class VtkHdfFile(AbstractContextManager): FILE_EXTENSION = ".vtkhdf" ROOT_GROUP = "VTKHDF" + VERSION_ATTR = "Version" + TYPE_ATTR = "Type" + @property @abstractmethod def TYPE(self) -> str: pass + def __enter__(self): + return self + def __init__(self, filename: Union[str, PathLike], comm: Optional[MPI.Comm] = None) -> None: """Create a new VtkHdfFile. @@ -60,11 +66,11 @@ class VtkHdfFile(AbstractContextManager): self.root_group = self.file_handler.create_group(self.ROOT_GROUP) # Set required Version and Type root attributes - self._set_root_attribute("Version", self.VERSION) + self._set_root_attribute(self.VERSION_ATTR, self.VERSION) type_as_ascii = self.TYPE.encode("ascii") self._set_root_attribute( - "Type", type_as_ascii, h5py.string_dtype("ascii", len(type_as_ascii)) + self.TYPE_ATTR, type_as_ascii, h5py.string_dtype("ascii", len(type_as_ascii)) ) def __exit__( @@ -98,6 +104,9 @@ class VtkHdfFile(AbstractContextManager): Args: attribute: Name of the attribute. + Returns: + value: Current value of the attribute if it exists. + Raises: KeyError: Raised if the attribute is not present as a key. """ @@ -124,17 +133,23 @@ class VtkHdfFile(AbstractContextManager): Args: *path: Components of the required path. + + Returns: + path: Path to the dataset. """ return "/".join([self.ROOT_GROUP, *path]) def _write_dataset( self, path: str, - data: npt.NDArray, + data: npt.ArrayLike, shape: Optional[npt.NDArray[np.intc]] = None, offset: Optional[npt.NDArray[np.intc]] = None, ): - """Write the specified dataset to the file. + """Write specified dataset to the VTKHDF file. + + If data has shape (d1, d2, ..., dn), i.e. n dimensions, then, if + specified, shape and offset must be of length n. Args: path: Absolute path to the dataset. @@ -143,8 +158,15 @@ class VtkHdfFile(AbstractContextManager): Can be omitted if data provides the full dataset. offset (optional): Offset to store the provided data at. Can be omitted if data provides the full dataset. + + Raises: + ValueError: Raised if the combination of data.shape, shape, + and offset are invalid. """ + if not isinstance(data, np.ndarray): + data = np.array([data]) + # VTKHDF stores datasets using ZYX ordering rather than XYZ data = data.transpose() @@ -156,10 +178,13 @@ class VtkHdfFile(AbstractContextManager): if shape is None or all(shape == data.shape): self.file_handler.create_dataset(path, data=data) + elif offset is None: + raise ValueError( + "Offset must not be None as the full dataset has not been provided." + " I.e. 
data.shape != shape"
+            )
         else:
             dimensions = len(data.shape)
-            if offset is None:
-                offset = np.zeros(dimensions, dtype=np.intc)
 
             if dimensions != len(shape):
                 raise ValueError(
@@ -195,17 +220,17 @@
         shape: Optional[npt.NDArray[np.intc]] = None,
         offset: Optional[npt.NDArray[np.intc]] = None,
     ):
-        dataset_path = self._build_dataset_path("PointData", name)
-        self._write_dataset(dataset_path, data, shape, offset)
+        """Add point data to the VTKHDF file.
 
-    def add_field_data(
-        self,
-        name: str,
-        data: npt.NDArray,
-        shape: Optional[npt.NDArray[np.intc]] = None,
-        offset: Optional[npt.NDArray[np.intc]] = None,
-    ):
-        dataset_path = self._build_dataset_path("FieldData", name)
+        Args:
+            name: Name of the dataset.
+            data: Data to be saved.
+            shape (optional): Size of the full dataset being created.
+                Can be omitted if data provides the full dataset.
+            offset (optional): Offset to store the provided data at. Can
+                be omitted if data provides the full dataset.
+        """
+        dataset_path = self._build_dataset_path("PointData", name)
         self._write_dataset(dataset_path, data, shape, offset)
 
     def add_cell_data(
@@ -215,11 +240,47 @@
         shape: Optional[npt.NDArray[np.intc]] = None,
         offset: Optional[npt.NDArray[np.intc]] = None,
     ):
+        """Add cell data to the VTKHDF file.
+
+        Args:
+            name: Name of the dataset.
+            data: Data to be saved.
+            shape (optional): Size of the full dataset being created.
+                Can be omitted if data provides the full dataset.
+            offset (optional): Offset to store the provided data at. Can
+                be omitted if data provides the full dataset.
+        """
         dataset_path = self._build_dataset_path("CellData", name)
         self._write_dataset(dataset_path, data, shape, offset)
 
+    def add_field_data(
+        self,
+        name: str,
+        data: npt.NDArray,
+        shape: Optional[npt.NDArray[np.intc]] = None,
+        offset: Optional[npt.NDArray[np.intc]] = None,
+    ):
+        """Add field data to the VTKHDF file.
+
+        Args:
+            name: Name of the dataset.
+            data: Data to be saved.
+            shape (optional): Size of the full dataset being created.
+                Can be omitted if data provides the full dataset.
+            offset (optional): Offset to store the provided data at. Can
+                be omitted if data provides the full dataset.
+        """
+        dataset_path = self._build_dataset_path("FieldData", name)
+        self._write_dataset(dataset_path, data, shape, offset)
+
 
 class VtkImageData(VtkHdfFile):
+    """File handler for creating a VTKHDF Image Data file.
+
+    File format information is available here:
+    https://docs.vtk.org/en/latest/design_documents/VTKFileFormats.html#image-data
+    """
+
     DIRECTION_ATTR = "Direction"
     ORIGIN_ATTR = "Origin"
     SPACING_ATTR = "Spacing"
@@ -239,7 +300,33 @@
         spacing: Optional[npt.NDArray[np.single]] = None,
         direction: Optional[npt.NDArray[np.single]] = None,
         comm: Optional[MPI.Comm] = None,
-    ) -> None:
+    ):
+        """Create a new VtkImageData file.
+
+        If the file already exists, it will be overwritten. Required
+        attributes (Type and Version) will be written to the file.
+
+        The file will be opened using the 'mpio' h5py driver if an MPI
+        communicator is provided.
+
+        Args:
+            filename: Name of the file (can be a file path). The file
+                extension will be set to '.vtkhdf'.
+            shape: Shape of the image data to be stored in the file.
+                This specifies the number of cells. Image data can be
+                1D, 2D, or 3D.
+            origin (optional): Origin of the image data. Default
+                [0, 0, 0].
+            spacing (optional): Discretisation of the image data.
+                Default [1, 1, 1].
+ direction (optional): Array of direction vectors for each + dimension of the image data. Can be a flattened array. + I.e. [[1, 0, 0], [0, 1, 0], [0, 0, 1]] and + [1, 0, 0, 0, 1, 0, 0, 0, 1] are equivalent. Default + [[1, 0, 0], [0, 1, 0], [0, 0, 1]]. + comm (optional): MPI communicator containing all ranks that + want to write to the file. + """ super().__init__(filename, comm) if len(shape) == 0: @@ -264,7 +351,7 @@ class VtkImageData(VtkHdfFile): self.set_spacing(spacing) if direction is None: - direction = np.diag(np.ones(self.DIMENSIONS, dtype=np.single)).flatten() + direction = np.diag(np.ones(self.DIMENSIONS, dtype=np.single)) self.set_direction(direction) @property @@ -284,25 +371,55 @@ class VtkImageData(VtkHdfFile): return self._get_root_attribute(self.DIRECTION_ATTR) def set_origin(self, origin: npt.NDArray[np.single]): + """Set the origin coordinate of the image data. + + Args: + origin: x, y, z coordinates to set as the origin. + """ if len(origin) != self.DIMENSIONS: raise ValueError(f"Origin attribute must have {self.DIMENSIONS} dimensions.") self._set_root_attribute(self.ORIGIN_ATTR, origin) def set_spacing(self, spacing: npt.NDArray[np.single]): + """Set the discritisation of the image data. + + Args: + spacing: Discritisation of the x, y, and z dimensions. + """ if len(spacing) != self.DIMENSIONS: raise ValueError(f"Spacing attribute must have {self.DIMENSIONS} dimensions.") self._set_root_attribute(self.SPACING_ATTR, spacing) def set_direction(self, direction: npt.NDArray[np.single]): + """Set the coordinate system of the image data. + + Args: + direction: Array of direction vectors for each dimension of + the image data. Can be a flattened array. I.e. + [[1, 0, 0], [0, 1, 0], [0, 0, 1]] and + [1, 0, 0, 0, 1, 0, 0, 0, 1] are equivalent. + """ + direction = direction.flatten() if len(direction) != self.DIMENSIONS * self.DIMENSIONS: raise ValueError( - f"Direction attribute must have {self.DIMENSIONS * self.DIMENSIONS} dimensions." + f"Direction array must contain {self.DIMENSIONS * self.DIMENSIONS} elements." ) self._set_root_attribute(self.DIRECTION_ATTR, direction) def add_point_data( self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None ): + """Add point data to the VTKHDF file. + + Args: + name: Name of the dataset. + data: Data to be saved. + offset (optional): Offset to store the provided data at. Can + be omitted if data provides the full dataset. + + Raises: + ValueError: Raised if data has invalid dimensions. + """ points_shape = self.shape + 1 if offset is None and any(data.shape != points_shape): # type: ignore raise ValueError( @@ -314,9 +431,20 @@ class VtkImageData(VtkHdfFile): def add_cell_data( self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None ): + """Add cell data to the VTKHDF file. + + Args: + name: Name of the dataset. + data: Data to be saved. + offset (optional): Offset to store the provided data at. Can + be omitted if data provides the full dataset. + + Raises: + ValueError: Raised if data has invalid dimensions. + """ if offset is None and any(data.shape != self.shape): # type: ignore raise ValueError( "If no offset is specified, data.shape must match the dimensions of this" - f" vtkImageData object. {data.shape} != {self.shape}" + f" VtkImageData object. 
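Taken together, VtkImageData gives a small user-facing API. A hedged usage sketch (grid size, spacing and data values are illustrative; the import path is the module as it stands in this patch, before the later move into vtkhdf_filehandlers):

    import numpy as np
    from gprMax.vtkhdf import VtkImageData

    material = np.zeros((10, 8, 6), dtype=np.uint32)  # one value per cell
    shape = np.array([10, 8, 6], dtype=np.intc)
    spacing = np.array([1e-3, 1e-3, 1e-3], dtype=np.single)

    with VtkImageData("geometry", shape, spacing=spacing) as f:
        f.add_cell_data("Material", material)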
{data.shape} != {self.shape}" ) return super().add_cell_data(name, data, self.shape, offset) From b886e8e3ee1b8b02774ab0eec7aa810a34c5873e Mon Sep 17 00:00:00 2001 From: nmannall Date: Fri, 8 Nov 2024 12:16:43 +0000 Subject: [PATCH 17/31] Add VtkUnstructuredGrid class --- gprMax/vtkhdf.py | 259 ++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 258 insertions(+), 1 deletion(-) diff --git a/gprMax/vtkhdf.py b/gprMax/vtkhdf.py index dc9f80b9..36231b20 100644 --- a/gprMax/vtkhdf.py +++ b/gprMax/vtkhdf.py @@ -1,6 +1,7 @@ import logging from abc import abstractmethod from contextlib import AbstractContextManager +from enum import Enum from os import PathLike from pathlib import Path from types import TracebackType @@ -19,9 +20,15 @@ class VtkHdfFile(AbstractContextManager): FILE_EXTENSION = ".vtkhdf" ROOT_GROUP = "VTKHDF" + # TODO: Can this be moved to using an Enum like root datasets? + # Main barrier: Can't subclass an enum with members and any base + # Enum class would need VERSION and TYPE as members. VERSION_ATTR = "Version" TYPE_ATTR = "Type" + class Dataset(str, Enum): + pass + @property @abstractmethod def TYPE(self) -> str: @@ -139,12 +146,72 @@ class VtkHdfFile(AbstractContextManager): """ return "/".join([self.ROOT_GROUP, *path]) + def _get_root_dataset(self, name: "VtkHdfFile.Dataset") -> h5py.Dataset: + """Get specified dataset from the root group of the VTKHDF file. + + Args: + path: Name of the dataset. + + Returns: + dataset: Returns specified h5py dataset. + """ + path = self._build_dataset_path(name) + return self._get_dataset(path) + + def _get_dataset(self, path: str) -> h5py.Dataset: + """Get specified dataset. + + Args: + path: Absolute path to the dataset. + + Returns: + dataset: Returns specified h5py dataset. + + Raises: + KeyError: Raised if the dataset does not exist, or the path + points to some other object, e.g. a Group not a Dataset. + """ + cls = self.file_handler.get(path, getclass=True) + if cls == "default": + raise KeyError("Path does not exist") + elif cls != h5py.Dataset: + raise KeyError(f"Dataset not found. Found '{cls}' instead") + + dataset = self.file_handler.get(path) + assert isinstance(dataset, h5py.Dataset) + return dataset + + def _write_root_dataset( + self, + name: "VtkHdfFile.Dataset", + data: npt.ArrayLike, + shape: Optional[npt.NDArray[np.intc]] = None, + offset: Optional[npt.NDArray[np.intc]] = None, + xyz_data_ordering=True, + ): + """Write specified dataset to the root group of the VTKHDF file. + + Args: + name: Name of the dataset. + data: Data to initialize the dataset. + shape (optional): Size of the full dataset being created. + Can be omitted if data provides the full dataset. + offset (optional): Offset to store the provided data at. Can + be omitted if data provides the full dataset. + xyz_data_ordering (optional): If True, the data will be + transposed as VTKHDF stores datasets using ZYX ordering. + Default True. + """ + path = self._build_dataset_path(name) + self._write_dataset(path, data, shape, offset, xyz_data_ordering) + def _write_dataset( self, path: str, data: npt.ArrayLike, shape: Optional[npt.NDArray[np.intc]] = None, offset: Optional[npt.NDArray[np.intc]] = None, + xyz_data_ordering=True, ): """Write specified dataset to the VTKHDF file. @@ -158,6 +225,9 @@ class VtkHdfFile(AbstractContextManager): Can be omitted if data provides the full dataset. offset (optional): Offset to store the provided data at. Can be omitted if data provides the full dataset. 
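The str-mixin Enum the TODO refers to behaves like a plain string wherever a dataset name is expected, which is why _build_dataset_path needs no changes. A self-contained sketch of the pattern:

    from enum import Enum

    class Dataset(str, Enum):
        POINTS = "Points"
        TYPES = "Types"

    # Members join and compare as their underlying string values
    assert "/".join(["VTKHDF", Dataset.POINTS]) == "VTKHDF/Points"
    assert Dataset.TYPES == "Types"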
+ xyz_data_ordering (optional): If True, the data will be + transposed as VTKHDF stores datasets using ZYX ordering. + Default True. Raises: ValueError: Raised if the combination of data.shape, shape, @@ -168,7 +238,8 @@ class VtkHdfFile(AbstractContextManager): data = np.array([data]) # VTKHDF stores datasets using ZYX ordering rather than XYZ - data = data.transpose() + if xyz_data_ordering: + data = data.transpose() if shape is not None: shape = np.flip(shape) @@ -448,3 +519,189 @@ class VtkImageData(VtkHdfFile): f" VtkImageData object. {data.shape} != {self.shape}" ) return super().add_cell_data(name, data, self.shape, offset) + + +class VtkCellType(np.uint8, Enum): + """VTK cell types as defined here: + https://vtk.org/doc/nightly/html/vtkCellType_8h_source.html#l00019 + """ + + # Linear cells + EMPTY_CELL = 0 + VERTEX = 1 + POLY_VERTEX = 2 + LINE = 3 + POLY_LINE = 4 + TRIANGLE = 5 + TRIANGLE_STRIP = 6 + POLYGON = 7 + PIXEL = 8 + QUAD = 9 + TETRA = 10 + VOXEL = 11 + HEXAHEDRON = 12 + WEDGE = 13 + PYRAMID = 14 + PENTAGONAL_PRISM = 15 + HEXAGONAL_PRISM = 16 + + +class VtkUnstructuredGrid(VtkHdfFile): + """File handler for creating a VTKHDF Unstructured Grid file. + + File format information is available here: + https://docs.vtk.org/en/latest/design_documents/VTKFileFormats.html#unstructured-grid + """ + + class Dataset(VtkHdfFile.Dataset): + CONNECTIVITY = "Connectivity" + NUMBER_OF_CELLS = "NumberOfCells" + NUMBER_OF_CONNECTIVITY_IDS = "NumberOfConnectivityIds" + NUMBER_OF_POINTS = "NumberOfPoints" + OFFSETS = "Offsets" + POINTS = "Points" + TYPES = "Types" + + @property + def TYPE(self) -> Literal["UnstructuredGrid"]: + return "UnstructuredGrid" + + def __init__( + self, + filename: Union[str, PathLike], + points: npt.NDArray, + cell_types: npt.NDArray[VtkCellType], + connectivity: npt.NDArray, + cell_offsets: npt.NDArray, + comm: Optional[MPI.Comm] = None, + ) -> None: + """Create a new VtkUnstructuredGrid file. + + An unstructured grid has N points and C cells. A cell is defined + as a collection of points which is specified by the connectivity + and cell_offsets arguments along with the list of cell_types. + + If the file already exists, it will be overriden. Required + attributes (Type and Version) will be written to the file. + + The file will be opened using the 'mpio' h5py driver if an MPI + communicator is provided. + + Args: + filename: Name of the file (can be a file path). The file + extension will be set to '.vtkhdf'. + points: Array of point coordinates of shape (N, 3). + cell_types: Array of VTK cell types of shape (C,). + connectivity: Array of point IDs that together with + cell_offsets, defines the points that make up each cell. + Each point ID has a value between 0 and (N-1) inclusive + and corresponds to a point in the points array. + cell_offsets: Array listing where each cell starts and ends + in the connectivity array. It has shape (C + 1,). + comm (optional): MPI communicator containing all ranks that + want to write to the file. + + Raises: + Value Error: Raised if argument dimensions are invalid. + """ + super().__init__(filename, comm) + + if len(cell_offsets) != len(cell_types) + 1: + raise ValueError( + "cell_offsets should be one longer than cell_types." + " I.e. 
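The XYZ-to-ZYX handling is a plain axis reversal: numpy's transpose() returns a reversed-axes view without copying, so an (nx, ny, nz) array is written as (nz, ny, nx). A quick check:

    import numpy as np

    data = np.arange(24).reshape(2, 3, 4)  # data[i, j, k] indexed as (x, y, z)
    data_zyx = data.transpose()            # shape (4, 3, 2), a view, no copy
    assert data_zyx[1, 2, 0] == data[0, 2, 1]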
one longer than the number of cells"
+            )
+
+        is_sorted = lambda a: np.all(a[:-1] <= a[1:])
+        if not is_sorted(cell_offsets):
+            raise ValueError("cell_offsets should be sorted in ascending order")
+
+        if len(connectivity) < cell_offsets[-1]:
+            raise ValueError("Connectivity array is shorter than final cell_offsets value")
+
+        elif len(connectivity) > cell_offsets[-1]:
+            logger.warning(
+                "Connectivity array longer than final cell_offsets value."
+                " Some connectivity data will be ignored"
+            )
+
+        self._write_root_dataset(self.Dataset.CONNECTIVITY, connectivity)
+        self._write_root_dataset(self.Dataset.NUMBER_OF_CELLS, len(cell_types))
+        self._write_root_dataset(self.Dataset.NUMBER_OF_CONNECTIVITY_IDS, len(connectivity))
+        self._write_root_dataset(self.Dataset.NUMBER_OF_POINTS, len(points))
+        self._write_root_dataset(self.Dataset.OFFSETS, cell_offsets)
+        self._write_root_dataset(self.Dataset.POINTS, points, xyz_data_ordering=False)
+        self._write_root_dataset(self.Dataset.TYPES, cell_types)
+
+    @property
+    def number_of_cells(self) -> int:
+        number_of_cells = self._get_root_dataset(self.Dataset.NUMBER_OF_CELLS)
+        return np.sum(number_of_cells, dtype=np.intc)
+
+    @property
+    def number_of_connectivity_ids(self) -> int:
+        number_of_connectivity_ids = self._get_root_dataset(self.Dataset.NUMBER_OF_CONNECTIVITY_IDS)
+        return np.sum(number_of_connectivity_ids, dtype=np.intc)
+
+    @property
+    def number_of_points(self) -> int:
+        number_of_points = self._get_root_dataset(self.Dataset.NUMBER_OF_POINTS)
+        return np.sum(number_of_points, dtype=np.intc)
+
+    def add_point_data(
+        self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None
+    ):
+        """Add point data to the VTKHDF file.
+
+        Args:
+            name: Name of the dataset.
+            data: Data to be saved.
+            offset (optional): Offset to store the provided data at. Can
+                be omitted if data provides the full dataset.
+
+        Raises:
+            ValueError: Raised if data has invalid dimensions.
+        """
+        shape = np.array(data.shape)
+        number_of_dimensions = len(shape)
+
+        if number_of_dimensions < 1 or number_of_dimensions > 2:
+            raise ValueError(f"Data must have 1 or 2 dimensions, not {number_of_dimensions}")
+        elif len(data) != self.number_of_points:
+            raise ValueError(
+                "Length of data must match the number of points in the vtkUnstructuredGrid."
+                f" {len(data)} != {self.number_of_points}"
+            )
+        elif number_of_dimensions == 2 and shape[1] != 1 and shape[1] != 3:
+            raise ValueError(f"The second dimension should have shape 1 or 3, not {shape[1]}")
+
+        return super().add_point_data(name, data, shape, offset)
+
+    def add_cell_data(
+        self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None
+    ):
+        """Add cell data to the VTKHDF file.
+
+        Args:
+            name: Name of the dataset.
+            data: Data to be saved.
+            offset (optional): Offset to store the provided data at. Can
+                be omitted if data provides the full dataset.
+
+        Raises:
+            ValueError: Raised if data has invalid dimensions.
+        """
+        shape = np.array(data.shape)
+        number_of_dimensions = len(shape)
+
+        if number_of_dimensions < 1 or number_of_dimensions > 2:
+            raise ValueError(f"Data must have 1 or 2 dimensions, not {number_of_dimensions}.")
+        elif len(data) != self.number_of_cells:
+            raise ValueError(
+                "Length of data must match the number of cells in the vtkUnstructuredGrid."
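A worked example of the layout those checks enforce: two LINE cells sharing a point, so C = 2, cell_offsets has C + 1 entries, and its final value equals len(connectivity). Values are illustrative; LINE = 3 comes from the VtkCellType enum above:

    import numpy as np

    points = np.array([[0.0, 0.0, 0.0],
                       [1.0, 0.0, 0.0],
                       [1.0, 1.0, 0.0]])                  # N = 3 points
    cell_types = np.array([3, 3], dtype=np.uint8)         # two LINE cells
    connectivity = np.array([0, 1, 1, 2], dtype=np.intc)  # point IDs per cell
    cell_offsets = np.array([0, 2, 4], dtype=np.intc)     # cell c spans connectivity[offsets[c]:offsets[c + 1]]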
+ f" {len(data)} != {self.number_of_cells}" + ) + elif number_of_dimensions == 2 and shape[1] != 1 and shape[1] != 3: + raise ValueError(f"The second dimension should have shape 1 or 3, not {shape[1]}") + + return super().add_cell_data(name, data, shape, offset) From b6443f1599d3ba259d68009e63386428f5124c29 Mon Sep 17 00:00:00 2001 From: nmannall Date: Fri, 8 Nov 2024 12:40:49 +0000 Subject: [PATCH 18/31] Update GeometeryViewLines to use a VtkUnstructuredGrid --- gprMax/cython/geometry_outputs.pyx | 72 +++++++++++ gprMax/geometry_outputs.py | 193 ++++++++++++++--------------- 2 files changed, 163 insertions(+), 102 deletions(-) diff --git a/gprMax/cython/geometry_outputs.pyx b/gprMax/cython/geometry_outputs.pyx index 8cefaa5f..f93c58d4 100644 --- a/gprMax/cython/geometry_outputs.pyx +++ b/gprMax/cython/geometry_outputs.pyx @@ -17,9 +17,81 @@ # along with gprMax. If not, see . import numpy as np + cimport numpy as np +cpdef get_line_properties( + int number_of_lines, + int nx, + int ny, + int nz, + np.uint32_t[:, :, :, :] ID +): + """Generate connectivity array and get material ID for each line. + + Args: + number_of_lines: Number of lines. + nx: Number of points in the x dimension. + ny: Number of points in the y dimension. + nz: Number of points in the z dimension. + ID: memoryview of sampled ID array according to geometry view + spatial discretisation. + + Returns: + connectivity: NDArray of shape (2 * number_of_lines,) listing + the start and end point IDs of each line. + material_data: NDArray of shape (number_of_lines,) listing + material IDs for each line. + """ + cdef np.ndarray material_data = np.zeros(number_of_lines, dtype=np.uint32) + cdef np.ndarray connectivity = np.zeros(2 * number_of_lines, dtype=np.intc) + cdef int line_index = 0 + cdef int connectivity_index = 0 + cdef int point_id = 0 + + cdef int z_step = 1 + cdef int y_step = nz + 1 + cdef int x_step = y_step * (ny + 1) + + cdef int i, j, k + + for i in range(nx): + for j in range(ny): + for k in range(nz): + + # x-direction cell edge + material_data[line_index] = ID[0, i, j, k] + connectivity[connectivity_index] = point_id + connectivity[connectivity_index + 1] = point_id + x_step + line_index += 1 + connectivity_index += 2 + + # y-direction cell edge + material_data[line_index] = ID[1, i, j, k] + connectivity[connectivity_index] = point_id + connectivity[connectivity_index + 1] = point_id + y_step + line_index += 1 + connectivity_index += 2 + + # z-direction cell edge + material_data[line_index] = ID[2, i, j, k] + connectivity[connectivity_index] = point_id + connectivity[connectivity_index + 1] = point_id + z_step + line_index += 1 + connectivity_index += 2 + + # Next point + point_id += 1 + + # Skip point at (i, j, nz) + point_id += 1 + + # Skip points in line (i, ny, t) where 0 <= t <= nz + point_id += nz + 1 + + return connectivity, material_data + cpdef write_lines( float xs, float ys, diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index ee13a07b..1adfbe8a 100644 --- a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -19,27 +19,27 @@ import json import logging import sys +from abc import ABC, abstractmethod from pathlib import Path +from typing import List import h5py import numpy as np -from evtk.hl import linesToVTK -from evtk.vtk import VtkUnstructuredGrid from tqdm import tqdm import gprMax.config as config from gprMax.grid.fdtd_grid import FDTDGrid -from gprMax.vtkhdf import VtkImageData +from gprMax.vtkhdf import VtkCellType, VtkImageData, VtkUnstructuredGrid from 
._version import __version__ -from .cython.geometry_outputs import write_lines +from .cython.geometry_outputs import get_line_properties from .subgrids.grid import SubGridBaseGrid from .utilities.utilities import get_terminal_width logger = logging.getLogger(__name__) -def save_geometry_views(gvs): +def save_geometry_views(gvs: "List[GeometryView]"): """Creates and saves geometryviews. Args: @@ -49,26 +49,28 @@ def save_geometry_views(gvs): logger.info("") for i, gv in enumerate(gvs): gv.set_filename() - vtk_data = gv.prep_vtk() + gv.prep_vtk() pbar = tqdm( total=gv.nbytes, unit="byte", unit_scale=True, - desc=f"Writing geometry view file {i + 1}/{len(gvs)}, {gv.filename.name}.vtkhdf", + desc=f"Writing geometry view file {i + 1}/{len(gvs)}, {gv.filename.name}", ncols=get_terminal_width() - 1, file=sys.stdout, disable=not config.sim_config.general["progressbars"], ) - gv.write_vtk(vtk_data) + gv.write_vtk() pbar.update(gv.nbytes) pbar.close() logger.info("") -class GeometryView: +class GeometryView(ABC): """Base class for Geometry Views.""" + FILE_EXTENSION = ".vtkhdf" + def __init__( self, xs: int, @@ -96,11 +98,14 @@ class GeometryView: self.step = np.array([dx, dy, dz], dtype=np.intc) self.size = self.stop - self.start - self.filename = filename + self.filename = Path(filename) self.filenamebase = filename self.grid = grid self.nbytes = None + self.material_data = None + self.materials = None + # Properties for backwards compatibility @property def xs(self) -> int: @@ -151,26 +156,26 @@ class GeometryView: return self.size[2] def set_filename(self): - """Constructs filename from user-supplied name and model run number.""" + """Construct filename from user-supplied name and model number.""" parts = config.get_model_config().output_file_path.parts self.filename = Path( *parts[:-1], self.filenamebase + config.get_model_config().appendmodelnumber - ) + ).with_suffix(self.FILE_EXTENSION) + + @abstractmethod + def prep_vtk(self): + pass + + @abstractmethod + def write_vtk(self): + pass class GeometryViewLines(GeometryView): - """Unstructured grid (.vtu) for a per-cell-edge geometry view.""" - - def __init__(self, *args): - super().__init__(*args) - self.vtkfiletype = VtkUnstructuredGrid + """Unstructured grid for a per-cell-edge geometry view.""" def prep_vtk(self): - """Prepares data for writing to VTK file. - - Returns: - vtk_data: dict of coordinates, data, and comments for VTK file. 
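The refactored base class keeps the old scalar attribute names alive as read-only views onto the new vector fields. A minimal sketch of the pattern, under illustrative names:

    import numpy as np

    class View:
        def __init__(self, xs: int, ys: int, zs: int):
            self.start = np.array([xs, ys, zs], dtype=np.intc)

        @property
        def xs(self) -> int:
            return self.start[0]  # old scalar name, backed by the new array

    assert View(1, 2, 3).xs == 1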
- """ + """Prepares data for writing to VTKHDF file.""" # Sample ID array according to geometry view spatial discretisation # Only create a new array if subsampling is required @@ -179,7 +184,7 @@ class GeometryViewLines(GeometryView): or (self.dx, self.dy, self.dz) != (1, 1, 1) or (self.xs, self.ys, self.zs) != (0, 0, 0) ): - # Require contiguous for evtk library + # Require contiguous array ID = np.ascontiguousarray( self.grid.ID[ :, @@ -192,24 +197,33 @@ class GeometryViewLines(GeometryView): # This array is contiguous by design ID = self.grid.ID - x, y, z, lines = write_lines( - (self.xs * self.grid.dx), - (self.ys * self.grid.dy), - (self.zs * self.grid.dz), - self.nx, - self.ny, - self.nz, - (self.dx * self.grid.dx), - (self.dy * self.grid.dy), - (self.dz * self.grid.dz), - ID, - ) + x = np.arange(self.nx + 1, dtype=np.float64) + y = np.arange(self.ny + 1, dtype=np.float64) + z = np.arange(self.nz + 1, dtype=np.float64) + coords = np.meshgrid(x, y, z, indexing="ij") + self.points = np.vstack(list(map(np.ravel, coords))).T + self.points += self.start + self.points *= self.step * self.grid.dl # Add offset to subgrid geometry to correctly locate within main grid if isinstance(self.grid, SubGridBaseGrid): - x += self.grid.i0 * self.grid.dx * self.grid.ratio - y += self.grid.j0 * self.grid.dy * self.grid.ratio - z += self.grid.k0 * self.grid.dz * self.grid.ratio + offset = [self.grid.i0, self.grid.j0, self.grid.k0] + self.points += offset * self.grid.dl * self.grid.ratio + + # Each point is the 'source' for 3 lines. + # NB: Excluding points at the far edge of the geometry as those + # are the 'source' for no lines + n_lines = 3 * self.nx * self.ny * self.nz + + self.cell_types = np.full(n_lines, VtkCellType.LINE) + self.cell_offsets = np.arange(0, 2 * n_lines + 2, 2, dtype=np.intc) + + self.connectivity, self.material_data = get_line_properties( + n_lines, self.nx, self.ny, self.nz, ID + ) + + assert isinstance(self.connectivity, np.ndarray) + assert isinstance(self.material_data, np.ndarray) # Write information about any PMLs, sources, receivers comments = Comments(self.grid, self) @@ -218,55 +232,36 @@ class GeometryViewLines(GeometryView): info = comments.get_gprmax_info() comments = json.dumps(info) + self.materials = comments + # Number of bytes of data to be written to file - offsets_size = np.arange(start=2, step=2, stop=len(x) + 1, dtype="int32").nbytes - connect_size = len(x) * np.dtype("int32").itemsize - cell_type_size = len(x) * np.dtype("uint8").itemsize self.nbytes = ( - x.nbytes - + y.nbytes - + z.nbytes - + lines.nbytes - + offsets_size - + connect_size - + cell_type_size + self.points.nbytes + + self.cell_types.nbytes + + self.connectivity.nbytes + + self.cell_offsets.nbytes + + self.material_data.nbytes ) - vtk_data = {"x": x, "y": y, "z": z, "data": lines, "comments": comments} + def write_vtk(self): + """Writes geometry information to a VTKHDF file.""" - return vtk_data - - def write_vtk(self, vtk_data): - """Writes geometry information to a VTK file. - - Args: - vtk_data: dict of coordinates, data, and comments for VTK file. 
- """ - - # Write the VTK file .vtu - linesToVTK( - str(self.filename), - vtk_data["x"], - vtk_data["y"], - vtk_data["z"], - cellData={"Material": vtk_data["data"]}, - comments=[vtk_data["comments"]], - ) + # Write the VTK file + with VtkUnstructuredGrid( + self.filename, + self.points, + self.cell_types, + self.connectivity, + self.cell_offsets, + ) as f: + f.add_cell_data("Material", self.material_data) class GeometryViewVoxels(GeometryView): """Image data for a per-cell geometry view.""" - def __init__(self, *args): - super().__init__(*args) - self.vtkfiletype = ".vtkhdf" - def prep_vtk(self): - """Prepares data for writing to VTK file. - - Returns: - vtk_data: dict of data and comments for VTK file. - """ + """Prepares data for writing to VTKHDF file.""" # Sample solid array according to geometry view spatial discretisation # Only create a new array if subsampling is required @@ -275,8 +270,8 @@ class GeometryViewVoxels(GeometryView): or (self.dx, self.dy, self.dz) != (1, 1, 1) or (self.xs, self.ys, self.zs) != (0, 0, 0) ): - # Require contiguous for evtk library - solid = np.ascontiguousarray( + # Require contiguous array + self.material_data = np.ascontiguousarray( self.grid.solid[ self.xs : self.xf : self.dx, self.ys : self.yf : self.dy, @@ -285,28 +280,10 @@ class GeometryViewVoxels(GeometryView): ) else: # This array is contiguous by design - solid = self.grid.solid - - # Write information about any PMLs, sources, receivers - comments = Comments(self.grid, self) - info = comments.get_gprmax_info() - comments = json.dumps(info) - - self.nbytes = solid.nbytes - - vtk_data = {"data": solid, "comments": comments} - - return vtk_data - - def write_vtk(self, vtk_data): - """Writes geometry information to a VTK file. - - Args: - vtk_data: dict of data and comments for VTK file. 
- """ + self.material_data = self.grid.solid if isinstance(self.grid, SubGridBaseGrid): - origin = np.array( + self.origin = np.array( [ (self.grid.i0 * self.grid.dx * self.grid.ratio), (self.grid.j0 * self.grid.dy * self.grid.ratio), @@ -314,12 +291,24 @@ class GeometryViewVoxels(GeometryView): ] ) else: - origin = self.start * self.grid.dl + self.origin = self.start * self.grid.dl - spacing = self.step * self.grid.dl + self.spacing = self.step * self.grid.dl - with VtkImageData(self.filename, self.size, origin=origin, spacing=spacing) as f: - f.add_cell_data("Material", vtk_data["data"]) + # Write information about any PMLs, sources, receivers + comments = Comments(self.grid, self) + info = comments.get_gprmax_info() + comments = json.dumps(info) + + self.nbytes = self.material_data.nbytes + + self.materials = comments + + def write_vtk(self): + """Writes geometry information to a VTKHDF file.""" + + with VtkImageData(self.filename, self.size, self.origin, self.spacing) as f: + f.add_cell_data("Material", self.material_data) class Comments: From 24863a1b9f30114b76751eb4e52fb12f16116baf Mon Sep 17 00:00:00 2001 From: nmannall Date: Fri, 8 Nov 2024 12:55:23 +0000 Subject: [PATCH 19/31] Create folder to split up VTKHDF functionality --- gprMax/{ => vtkhdf_filehandlers}/vtkhdf.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename gprMax/{ => vtkhdf_filehandlers}/vtkhdf.py (100%) diff --git a/gprMax/vtkhdf.py b/gprMax/vtkhdf_filehandlers/vtkhdf.py similarity index 100% rename from gprMax/vtkhdf.py rename to gprMax/vtkhdf_filehandlers/vtkhdf.py From 237c3bae4a8269627404ea23a191998cf135384c Mon Sep 17 00:00:00 2001 From: nmannall Date: Fri, 8 Nov 2024 13:03:09 +0000 Subject: [PATCH 20/31] Split up VTKHDF classes into seperate files --- gprMax/geometry_outputs.py | 4 +- gprMax/vtkhdf_filehandlers/vtk_image_data.py | 184 ++++++++++ .../vtk_unstructured_grid.py | 172 +++++++++ gprMax/vtkhdf_filehandlers/vtkhdf.py | 339 +----------------- 4 files changed, 360 insertions(+), 339 deletions(-) create mode 100644 gprMax/vtkhdf_filehandlers/vtk_image_data.py create mode 100644 gprMax/vtkhdf_filehandlers/vtk_unstructured_grid.py diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index 1adfbe8a..c08e3c47 100644 --- a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -29,7 +29,9 @@ from tqdm import tqdm import gprMax.config as config from gprMax.grid.fdtd_grid import FDTDGrid -from gprMax.vtkhdf import VtkCellType, VtkImageData, VtkUnstructuredGrid +from gprMax.vtkhdf_filehandlers.vtk_image_data import VtkImageData +from gprMax.vtkhdf_filehandlers.vtk_unstructured_grid import VtkUnstructuredGrid +from gprMax.vtkhdf_filehandlers.vtkhdf import VtkCellType from ._version import __version__ from .cython.geometry_outputs import get_line_properties diff --git a/gprMax/vtkhdf_filehandlers/vtk_image_data.py b/gprMax/vtkhdf_filehandlers/vtk_image_data.py new file mode 100644 index 00000000..e521d201 --- /dev/null +++ b/gprMax/vtkhdf_filehandlers/vtk_image_data.py @@ -0,0 +1,184 @@ +from os import PathLike +from typing import Literal, Optional, Union + +import numpy as np +import numpy.typing as npt +from mpi4py import MPI + +from gprMax.vtkhdf_filehandlers.vtkhdf import VtkHdfFile + + +class VtkImageData(VtkHdfFile): + """File handler for creating a VTKHDF Image Data file. 
+
+    File format information is available here:
+    https://docs.vtk.org/en/latest/design_documents/VTKFileFormats.html#image-data
+    """
+
+    DIRECTION_ATTR = "Direction"
+    ORIGIN_ATTR = "Origin"
+    SPACING_ATTR = "Spacing"
+    WHOLE_EXTENT_ATTR = "WholeExtent"
+
+    DIMENSIONS = 3
+
+    @property
+    def TYPE(self) -> Literal["ImageData"]:
+        return "ImageData"
+
+    def __init__(
+        self,
+        filename: Union[str, PathLike],
+        shape: npt.NDArray[np.intc],
+        origin: Optional[npt.NDArray[np.single]] = None,
+        spacing: Optional[npt.NDArray[np.single]] = None,
+        direction: Optional[npt.NDArray[np.single]] = None,
+        comm: Optional[MPI.Comm] = None,
+    ):
+        """Create a new VtkImageData file.
+
+        If the file already exists, it will be overwritten. Required
+        attributes (Type and Version) will be written to the file.
+
+        The file will be opened using the 'mpio' h5py driver if an MPI
+        communicator is provided.
+
+        Args:
+            filename: Name of the file (can be a file path). The file
+                extension will be set to '.vtkhdf'.
+            shape: Shape of the image data to be stored in the file.
+                This specifies the number of cells. Image data can be
+                1D, 2D, or 3D.
+            origin (optional): Origin of the image data. Default
+                [0, 0, 0].
+            spacing (optional): Discretisation of the image data.
+                Default [1, 1, 1].
+            direction (optional): Array of direction vectors for each
+                dimension of the image data. Can be a flattened array.
+                I.e. [[1, 0, 0], [0, 1, 0], [0, 0, 1]] and
+                [1, 0, 0, 0, 1, 0, 0, 0, 1] are equivalent. Default
+                [[1, 0, 0], [0, 1, 0], [0, 0, 1]].
+            comm (optional): MPI communicator containing all ranks that
+                want to write to the file.
+        """
+        super().__init__(filename, comm)
+
+        if len(shape) == 0:
+            raise ValueError("Shape must not be empty.")
+        if len(shape) > self.DIMENSIONS:
+            raise ValueError(f"Shape must not have more than {self.DIMENSIONS} dimensions.")
+        elif len(shape) < self.DIMENSIONS:
+            shape = np.concatenate((shape, np.ones(self.DIMENSIONS - len(shape), dtype=np.intc)))
+
+        self.shape = shape
+
+        whole_extent = np.zeros(2 * self.DIMENSIONS, dtype=np.intc)
+        whole_extent[1::2] = self.shape
+        self._set_root_attribute(self.WHOLE_EXTENT_ATTR, whole_extent)
+
+        if origin is None:
+            origin = np.zeros(self.DIMENSIONS, dtype=np.single)
+        self.set_origin(origin)
+
+        if spacing is None:
+            spacing = np.ones(self.DIMENSIONS, dtype=np.single)
+        self.set_spacing(spacing)
+
+        if direction is None:
+            direction = np.diag(np.ones(self.DIMENSIONS, dtype=np.single))
+        self.set_direction(direction)
+
+    @property
+    def whole_extent(self) -> npt.NDArray[np.intc]:
+        return self._get_root_attribute(self.WHOLE_EXTENT_ATTR)
+
+    @property
+    def origin(self) -> npt.NDArray[np.single]:
+        return self._get_root_attribute(self.ORIGIN_ATTR)
+
+    @property
+    def spacing(self) -> npt.NDArray[np.single]:
+        return self._get_root_attribute(self.SPACING_ATTR)
+
+    @property
+    def direction(self) -> npt.NDArray[np.single]:
+        return self._get_root_attribute(self.DIRECTION_ATTR)
+
+    def set_origin(self, origin: npt.NDArray[np.single]):
+        """Set the origin coordinate of the image data.
+
+        Args:
+            origin: x, y, z coordinates to set as the origin.
+        """
+        if len(origin) != self.DIMENSIONS:
+            raise ValueError(f"Origin attribute must have {self.DIMENSIONS} dimensions.")
+        self._set_root_attribute(self.ORIGIN_ATTR, origin)
+
+    def set_spacing(self, spacing: npt.NDArray[np.single]):
+        """Set the discretisation of the image data.
+
+        Args:
+            spacing: Discretisation of the x, y, and z dimensions.
+ """ + if len(spacing) != self.DIMENSIONS: + raise ValueError(f"Spacing attribute must have {self.DIMENSIONS} dimensions.") + self._set_root_attribute(self.SPACING_ATTR, spacing) + + def set_direction(self, direction: npt.NDArray[np.single]): + """Set the coordinate system of the image data. + + Args: + direction: Array of direction vectors for each dimension of + the image data. Can be a flattened array. I.e. + [[1, 0, 0], [0, 1, 0], [0, 0, 1]] and + [1, 0, 0, 0, 1, 0, 0, 0, 1] are equivalent. + """ + direction = direction.flatten() + if len(direction) != self.DIMENSIONS * self.DIMENSIONS: + raise ValueError( + f"Direction array must contain {self.DIMENSIONS * self.DIMENSIONS} elements." + ) + self._set_root_attribute(self.DIRECTION_ATTR, direction) + + def add_point_data( + self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None + ): + """Add point data to the VTKHDF file. + + Args: + name: Name of the dataset. + data: Data to be saved. + offset (optional): Offset to store the provided data at. Can + be omitted if data provides the full dataset. + + Raises: + ValueError: Raised if data has invalid dimensions. + """ + points_shape = self.shape + 1 + if offset is None and any(data.shape != points_shape): # type: ignore + raise ValueError( + "If no offset is specified, data.shape must be one larger in each dimension than" + f" this vtkImageData object. {data.shape} != {points_shape}" + ) + return super().add_point_data(name, data, points_shape, offset) + + def add_cell_data( + self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None + ): + """Add cell data to the VTKHDF file. + + Args: + name: Name of the dataset. + data: Data to be saved. + offset (optional): Offset to store the provided data at. Can + be omitted if data provides the full dataset. + + Raises: + ValueError: Raised if data has invalid dimensions. + """ + if offset is None and any(data.shape != self.shape): # type: ignore + raise ValueError( + "If no offset is specified, data.shape must match the dimensions of this" + f" VtkImageData object. {data.shape} != {self.shape}" + ) + return super().add_cell_data(name, data, self.shape, offset) diff --git a/gprMax/vtkhdf_filehandlers/vtk_unstructured_grid.py b/gprMax/vtkhdf_filehandlers/vtk_unstructured_grid.py new file mode 100644 index 00000000..0f9a5c0f --- /dev/null +++ b/gprMax/vtkhdf_filehandlers/vtk_unstructured_grid.py @@ -0,0 +1,172 @@ +import logging +from os import PathLike +from typing import Literal, Optional, Union + +import numpy as np +import numpy.typing as npt +from mpi4py import MPI + +from gprMax.vtkhdf_filehandlers.vtkhdf import VtkCellType, VtkHdfFile + +logger = logging.getLogger(__name__) + + +class VtkUnstructuredGrid(VtkHdfFile): + """File handler for creating a VTKHDF Unstructured Grid file. 
+
+    File format information is available here:
+    https://docs.vtk.org/en/latest/design_documents/VTKFileFormats.html#unstructured-grid
+    """
+
+    class Dataset(VtkHdfFile.Dataset):
+        CONNECTIVITY = "Connectivity"
+        NUMBER_OF_CELLS = "NumberOfCells"
+        NUMBER_OF_CONNECTIVITY_IDS = "NumberOfConnectivityIds"
+        NUMBER_OF_POINTS = "NumberOfPoints"
+        OFFSETS = "Offsets"
+        POINTS = "Points"
+        TYPES = "Types"
+
+    @property
+    def TYPE(self) -> Literal["UnstructuredGrid"]:
+        return "UnstructuredGrid"
+
+    def __init__(
+        self,
+        filename: Union[str, PathLike],
+        points: npt.NDArray,
+        cell_types: npt.NDArray[VtkCellType],
+        connectivity: npt.NDArray,
+        cell_offsets: npt.NDArray,
+        comm: Optional[MPI.Comm] = None,
+    ) -> None:
+        """Create a new VtkUnstructuredGrid file.
+
+        An unstructured grid has N points and C cells. A cell is defined
+        as a collection of points which is specified by the connectivity
+        and cell_offsets arguments along with the list of cell_types.
+
+        If the file already exists, it will be overwritten. Required
+        attributes (Type and Version) will be written to the file.
+
+        The file will be opened using the 'mpio' h5py driver if an MPI
+        communicator is provided.
+
+        Args:
+            filename: Name of the file (can be a file path). The file
+                extension will be set to '.vtkhdf'.
+            points: Array of point coordinates of shape (N, 3).
+            cell_types: Array of VTK cell types of shape (C,).
+            connectivity: Array of point IDs that together with
+                cell_offsets, defines the points that make up each cell.
+                Each point ID has a value between 0 and (N-1) inclusive
+                and corresponds to a point in the points array.
+            cell_offsets: Array listing where each cell starts and ends
+                in the connectivity array. It has shape (C + 1,).
+            comm (optional): MPI communicator containing all ranks that
+                want to write to the file.
+
+        Raises:
+            ValueError: Raised if argument dimensions are invalid.
+        """
+        super().__init__(filename, comm)
+
+        if len(cell_offsets) != len(cell_types) + 1:
+            raise ValueError(
+                "cell_offsets should be one longer than cell_types."
+                " I.e. one longer than the number of cells"
+            )
+
+        is_sorted = lambda a: np.all(a[:-1] <= a[1:])
+        if not is_sorted(cell_offsets):
+            raise ValueError("cell_offsets should be sorted in ascending order")
+
+        if len(connectivity) < cell_offsets[-1]:
+            raise ValueError("Connectivity array is shorter than final cell_offsets value")
+
+        elif len(connectivity) > cell_offsets[-1]:
+            logger.warning(
+                "Connectivity array longer than final cell_offsets value."
+ " Some connectivity data will be ignored" + ) + + self._write_root_dataset(self.Dataset.CONNECTIVITY, connectivity) + self._write_root_dataset(self.Dataset.NUMBER_OF_CELLS, len(cell_types)) + self._write_root_dataset(self.Dataset.NUMBER_OF_CONNECTIVITY_IDS, len(connectivity)) + self._write_root_dataset(self.Dataset.NUMBER_OF_POINTS, len(points)) + self._write_root_dataset(self.Dataset.OFFSETS, cell_offsets) + self._write_root_dataset(self.Dataset.POINTS, points, xyz_data_ordering=False) + self._write_root_dataset(self.Dataset.TYPES, cell_types) + + @property + def number_of_cells(self) -> int: + number_of_cells = self._get_root_dataset(self.Dataset.NUMBER_OF_CELLS) + return np.sum(number_of_cells, dtype=np.intc) + + @property + def number_of_connectivity_ids(self) -> int: + number_of_connectivity_ids = self._get_root_dataset(self.Dataset.NUMBER_OF_CONNECTIVITY_IDS) + return np.sum(number_of_connectivity_ids, dtype=np.intc) + + @property + def number_of_points(self) -> int: + number_of_points = self._get_root_dataset(self.Dataset.NUMBER_OF_POINTS) + return np.sum(number_of_points, dtype=np.intc) + + def add_point_data( + self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None + ): + """Add point data to the VTKHDF file. + + Args: + name: Name of the dataset. + data: Data to be saved. + offset (optional): Offset to store the provided data at. Can + be omitted if data provides the full dataset. + + Raises: + ValueError: Raised if data has invalid dimensions. + """ + shape = np.array(data.shape) + number_of_dimensions = len(shape) + + if number_of_dimensions < 1 or number_of_dimensions > 2: + raise ValueError(f"Data must have 1 or 2 dimensions, not {number_of_dimensions}") + elif len(data) != self.number_of_points: + raise ValueError( + "Length of data must match the number of points in the vtkUnstructuredGrid." + f" {len(data)} != {self.number_of_points}" + ) + elif number_of_dimensions == 2 and shape[1] != 1 and shape[1] != 3: + raise ValueError(f"The second dimension should have shape 1 or 3, not {shape[1]}") + + return super().add_point_data(name, data, shape, offset) + + def add_cell_data( + self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None + ): + """Add cell data to the VTKHDF file. + + Args: + name: Name of the dataset. + data: Data to be saved. + offset (optional): Offset to store the provided data at. Can + be omitted if data provides the full dataset. + + Raises: + ValueError: Raised if data has invalid dimensions. + """ + shape = np.array(data.shape) + number_of_dimensions = len(shape) + + if number_of_dimensions < 1 or number_of_dimensions > 2: + raise ValueError(f"Data must have 1 or 2 dimensions, not {number_of_dimensions}.") + elif len(data) != self.number_of_cells: + raise ValueError( + "Length of data must match the number of cells in the vtkUnstructuredGrid." 
+ f" {len(data)} != {self.number_of_cells}" + ) + elif number_of_dimensions == 2 and shape[1] != 1 and shape[1] != 3: + raise ValueError(f"The second dimension should have shape 1 or 3, not {shape[1]}") + + return super().add_cell_data(name, data, shape, offset) diff --git a/gprMax/vtkhdf_filehandlers/vtkhdf.py b/gprMax/vtkhdf_filehandlers/vtkhdf.py index 36231b20..a444dcc7 100644 --- a/gprMax/vtkhdf_filehandlers/vtkhdf.py +++ b/gprMax/vtkhdf_filehandlers/vtkhdf.py @@ -5,7 +5,7 @@ from enum import Enum from os import PathLike from pathlib import Path from types import TracebackType -from typing import Literal, Optional, Union +from typing import Optional, Union import h5py import numpy as np @@ -345,182 +345,6 @@ class VtkHdfFile(AbstractContextManager): self._write_dataset(dataset_path, data, shape, offset) -class VtkImageData(VtkHdfFile): - """File handler for creating a VTKHDF Image Data file. - - File format information is available here: - https://docs.vtk.org/en/latest/design_documents/VTKFileFormats.html#image-data - """ - - DIRECTION_ATTR = "Direction" - ORIGIN_ATTR = "Origin" - SPACING_ATTR = "Spacing" - WHOLE_EXTENT_ATTR = "WholeExtent" - - DIMENSIONS = 3 - - @property - def TYPE(self) -> Literal["ImageData"]: - return "ImageData" - - def __init__( - self, - filename: Union[str, PathLike], - shape: npt.NDArray[np.intc], - origin: Optional[npt.NDArray[np.single]] = None, - spacing: Optional[npt.NDArray[np.single]] = None, - direction: Optional[npt.NDArray[np.single]] = None, - comm: Optional[MPI.Comm] = None, - ): - """Create a new VtkImageData file. - - If the file already exists, it will be overriden. Required - attributes (Type and Version) will be written to the file. - - The file will be opened using the 'mpio' h5py driver if an MPI - communicator is provided. - - Args: - filename: Name of the file (can be a file path). The file - extension will be set to '.vtkhdf'. - shape: Shape of the image data to be stored in the file. - This specifies the number of cells. Image data can be - 1D, 2D, or 3D. - origin (optional): Origin of the image data. Default - [0, 0, 0]. - spacing (optional): Discritisation of the image data. - Default [1, 1, 1]. - direction (optional): Array of direction vectors for each - dimension of the image data. Can be a flattened array. - I.e. [[1, 0, 0], [0, 1, 0], [0, 0, 1]] and - [1, 0, 0, 0, 1, 0, 0, 0, 1] are equivalent. Default - [[1, 0, 0], [0, 1, 0], [0, 0, 1]]. - comm (optional): MPI communicator containing all ranks that - want to write to the file. 
- """ - super().__init__(filename, comm) - - if len(shape) == 0: - raise ValueError(f"Shape must not be empty.") - if len(shape) > self.DIMENSIONS: - raise ValueError(f"Shape must not have more than {self.DIMENSIONS} dimensions.") - elif len(shape) < self.DIMENSIONS: - shape = np.concatenate((shape, np.ones(self.DIMENSIONS - len(shape), dtype=np.intc))) - - self.shape = shape - - whole_extent = np.zeros(2 * self.DIMENSIONS, dtype=np.intc) - whole_extent[1::2] = self.shape - self._set_root_attribute(self.WHOLE_EXTENT_ATTR, whole_extent) - - if origin is None: - origin = np.zeros(self.DIMENSIONS, dtype=np.single) - self.set_origin(origin) - - if spacing is None: - spacing = np.ones(self.DIMENSIONS, dtype=np.single) - self.set_spacing(spacing) - - if direction is None: - direction = np.diag(np.ones(self.DIMENSIONS, dtype=np.single)) - self.set_direction(direction) - - @property - def whole_extent(self) -> npt.NDArray[np.intc]: - return self._get_root_attribute(self.WHOLE_EXTENT_ATTR) - - @property - def origin(self) -> npt.NDArray[np.single]: - return self._get_root_attribute(self.ORIGIN_ATTR) - - @property - def spacing(self) -> npt.NDArray[np.single]: - return self._get_root_attribute(self.SPACING_ATTR) - - @property - def direction(self) -> npt.NDArray[np.single]: - return self._get_root_attribute(self.DIRECTION_ATTR) - - def set_origin(self, origin: npt.NDArray[np.single]): - """Set the origin coordinate of the image data. - - Args: - origin: x, y, z coordinates to set as the origin. - """ - if len(origin) != self.DIMENSIONS: - raise ValueError(f"Origin attribute must have {self.DIMENSIONS} dimensions.") - self._set_root_attribute(self.ORIGIN_ATTR, origin) - - def set_spacing(self, spacing: npt.NDArray[np.single]): - """Set the discritisation of the image data. - - Args: - spacing: Discritisation of the x, y, and z dimensions. - """ - if len(spacing) != self.DIMENSIONS: - raise ValueError(f"Spacing attribute must have {self.DIMENSIONS} dimensions.") - self._set_root_attribute(self.SPACING_ATTR, spacing) - - def set_direction(self, direction: npt.NDArray[np.single]): - """Set the coordinate system of the image data. - - Args: - direction: Array of direction vectors for each dimension of - the image data. Can be a flattened array. I.e. - [[1, 0, 0], [0, 1, 0], [0, 0, 1]] and - [1, 0, 0, 0, 1, 0, 0, 0, 1] are equivalent. - """ - direction = direction.flatten() - if len(direction) != self.DIMENSIONS * self.DIMENSIONS: - raise ValueError( - f"Direction array must contain {self.DIMENSIONS * self.DIMENSIONS} elements." - ) - self._set_root_attribute(self.DIRECTION_ATTR, direction) - - def add_point_data( - self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None - ): - """Add point data to the VTKHDF file. - - Args: - name: Name of the dataset. - data: Data to be saved. - offset (optional): Offset to store the provided data at. Can - be omitted if data provides the full dataset. - - Raises: - ValueError: Raised if data has invalid dimensions. - """ - points_shape = self.shape + 1 - if offset is None and any(data.shape != points_shape): # type: ignore - raise ValueError( - "If no offset is specified, data.shape must be one larger in each dimension than" - f" this vtkImageData object. {data.shape} != {points_shape}" - ) - return super().add_point_data(name, data, points_shape, offset) - - def add_cell_data( - self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None - ): - """Add cell data to the VTKHDF file. - - Args: - name: Name of the dataset. 
- data: Data to be saved. - offset (optional): Offset to store the provided data at. Can - be omitted if data provides the full dataset. - - Raises: - ValueError: Raised if data has invalid dimensions. - """ - if offset is None and any(data.shape != self.shape): # type: ignore - raise ValueError( - "If no offset is specified, data.shape must match the dimensions of this" - f" VtkImageData object. {data.shape} != {self.shape}" - ) - return super().add_cell_data(name, data, self.shape, offset) - - class VtkCellType(np.uint8, Enum): """VTK cell types as defined here: https://vtk.org/doc/nightly/html/vtkCellType_8h_source.html#l00019 @@ -544,164 +368,3 @@ class VtkCellType(np.uint8, Enum): PYRAMID = 14 PENTAGONAL_PRISM = 15 HEXAGONAL_PRISM = 16 - - -class VtkUnstructuredGrid(VtkHdfFile): - """File handler for creating a VTKHDF Unstructured Grid file. - - File format information is available here: - https://docs.vtk.org/en/latest/design_documents/VTKFileFormats.html#unstructured-grid - """ - - class Dataset(VtkHdfFile.Dataset): - CONNECTIVITY = "Connectivity" - NUMBER_OF_CELLS = "NumberOfCells" - NUMBER_OF_CONNECTIVITY_IDS = "NumberOfConnectivityIds" - NUMBER_OF_POINTS = "NumberOfPoints" - OFFSETS = "Offsets" - POINTS = "Points" - TYPES = "Types" - - @property - def TYPE(self) -> Literal["UnstructuredGrid"]: - return "UnstructuredGrid" - - def __init__( - self, - filename: Union[str, PathLike], - points: npt.NDArray, - cell_types: npt.NDArray[VtkCellType], - connectivity: npt.NDArray, - cell_offsets: npt.NDArray, - comm: Optional[MPI.Comm] = None, - ) -> None: - """Create a new VtkUnstructuredGrid file. - - An unstructured grid has N points and C cells. A cell is defined - as a collection of points which is specified by the connectivity - and cell_offsets arguments along with the list of cell_types. - - If the file already exists, it will be overriden. Required - attributes (Type and Version) will be written to the file. - - The file will be opened using the 'mpio' h5py driver if an MPI - communicator is provided. - - Args: - filename: Name of the file (can be a file path). The file - extension will be set to '.vtkhdf'. - points: Array of point coordinates of shape (N, 3). - cell_types: Array of VTK cell types of shape (C,). - connectivity: Array of point IDs that together with - cell_offsets, defines the points that make up each cell. - Each point ID has a value between 0 and (N-1) inclusive - and corresponds to a point in the points array. - cell_offsets: Array listing where each cell starts and ends - in the connectivity array. It has shape (C + 1,). - comm (optional): MPI communicator containing all ranks that - want to write to the file. - - Raises: - Value Error: Raised if argument dimensions are invalid. - """ - super().__init__(filename, comm) - - if len(cell_offsets) != len(cell_types) + 1: - raise ValueError( - "cell_offsets should be one longer than cell_types." - " I.e. one longer than the number of cells" - ) - - is_sorted = lambda a: np.all(a[:-1] <= a[1:]) - if not is_sorted(cell_offsets): - raise ValueError("cell_offsets should be sorted in ascending order") - - if len(connectivity) < cell_offsets[-1]: - raise ValueError("Connectivity array is shorter than final cell_offsets value") - - elif len(connectivity) > cell_offsets[-1]: - raise logger.warning( - "Connectivity array longer than final cell_offsets value." 
- " Some connectivity data will be ignored" - ) - - self._write_root_dataset(self.Dataset.CONNECTIVITY, connectivity) - self._write_root_dataset(self.Dataset.NUMBER_OF_CELLS, len(cell_types)) - self._write_root_dataset(self.Dataset.NUMBER_OF_CONNECTIVITY_IDS, len(connectivity)) - self._write_root_dataset(self.Dataset.NUMBER_OF_POINTS, len(points)) - self._write_root_dataset(self.Dataset.OFFSETS, cell_offsets) - self._write_root_dataset(self.Dataset.POINTS, points, xyz_data_ordering=False) - self._write_root_dataset(self.Dataset.TYPES, cell_types) - - @property - def number_of_cells(self) -> int: - number_of_cells = self._get_root_dataset(self.Dataset.NUMBER_OF_CELLS) - return np.sum(number_of_cells, dtype=np.intc) - - @property - def number_of_connectivity_ids(self) -> int: - number_of_connectivity_ids = self._get_root_dataset(self.Dataset.NUMBER_OF_CONNECTIVITY_IDS) - return np.sum(number_of_connectivity_ids, dtype=np.intc) - - @property - def number_of_points(self) -> int: - number_of_points = self._get_root_dataset(self.Dataset.NUMBER_OF_POINTS) - return np.sum(number_of_points, dtype=np.intc) - - def add_point_data( - self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None - ): - """Add point data to the VTKHDF file. - - Args: - name: Name of the dataset. - data: Data to be saved. - offset (optional): Offset to store the provided data at. Can - be omitted if data provides the full dataset. - - Raises: - ValueError: Raised if data has invalid dimensions. - """ - shape = np.array(data.shape) - number_of_dimensions = len(shape) - - if number_of_dimensions < 1 or number_of_dimensions > 2: - raise ValueError(f"Data must have 1 or 2 dimensions, not {number_of_dimensions}") - elif len(data) != self.number_of_points: - raise ValueError( - "Length of data must match the number of points in the vtkUnstructuredGrid." - f" {len(data)} != {self.number_of_points}" - ) - elif number_of_dimensions == 2 and shape[1] != 1 and shape[1] != 3: - raise ValueError(f"The second dimension should have shape 1 or 3, not {shape[1]}") - - return super().add_point_data(name, data, shape, offset) - - def add_cell_data( - self, name: str, data: npt.NDArray, offset: Optional[npt.NDArray[np.intc]] = None - ): - """Add cell data to the VTKHDF file. - - Args: - name: Name of the dataset. - data: Data to be saved. - offset (optional): Offset to store the provided data at. Can - be omitted if data provides the full dataset. - - Raises: - ValueError: Raised if data has invalid dimensions. - """ - shape = np.array(data.shape) - number_of_dimensions = len(shape) - - if number_of_dimensions < 1 or number_of_dimensions > 2: - raise ValueError(f"Data must have 1 or 2 dimensions, not {number_of_dimensions}.") - elif len(data) != self.number_of_cells: - raise ValueError( - "Length of data must match the number of cells in the vtkUnstructuredGrid." 
- f" {len(data)} != {self.number_of_cells}" - ) - elif number_of_dimensions == 2 and shape[1] != 1 and shape[1] != 3: - raise ValueError(f"The second dimension should have shape 1 or 3, not {shape[1]}") - - return super().add_cell_data(name, data, shape, offset) From d09b209ba3d72cff2d8f11ba5b372d8edf23d272 Mon Sep 17 00:00:00 2001 From: nmannall Date: Mon, 11 Nov 2024 16:56:06 +0000 Subject: [PATCH 21/31] Fix data type for string datasets --- gprMax/vtkhdf_filehandlers/vtkhdf.py | 32 +++++++++++++++++++++------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/gprMax/vtkhdf_filehandlers/vtkhdf.py b/gprMax/vtkhdf_filehandlers/vtkhdf.py index a444dcc7..dab3f676 100644 --- a/gprMax/vtkhdf_filehandlers/vtkhdf.py +++ b/gprMax/vtkhdf_filehandlers/vtkhdf.py @@ -203,7 +203,9 @@ class VtkHdfFile(AbstractContextManager): Default True. """ path = self._build_dataset_path(name) - self._write_dataset(path, data, shape, offset, xyz_data_ordering) + self._write_dataset( + path, data, shape=shape, offset=offset, xyz_data_ordering=xyz_data_ordering + ) def _write_dataset( self, @@ -211,6 +213,7 @@ class VtkHdfFile(AbstractContextManager): data: npt.ArrayLike, shape: Optional[npt.NDArray[np.intc]] = None, offset: Optional[npt.NDArray[np.intc]] = None, + dtype: Optional[npt.DTypeLike] = None, xyz_data_ordering=True, ): """Write specified dataset to the VTKHDF file. @@ -234,8 +237,18 @@ class VtkHdfFile(AbstractContextManager): and offset are invalid. """ + # If dtype is a string, ensure it is ascii encoded and use + # variable length strings + if dtype is not None and h5py.check_string_dtype(dtype) is not None: + dtype = h5py.string_dtype(encoding="ascii", length=None) + if not isinstance(data, np.ndarray): - data = np.array([data]) + data = np.array(data, dtype=dtype) + if data.ndim < 1: + data = np.expand_dims(data, axis=-1) + + if dtype is None: + dtype = data.dtype # VTKHDF stores datasets using ZYX ordering rather than XYZ if xyz_data_ordering: @@ -248,7 +261,7 @@ class VtkHdfFile(AbstractContextManager): offset = np.flip(offset) if shape is None or all(shape == data.shape): - self.file_handler.create_dataset(path, data=data) + self.file_handler.create_dataset(path, data=data, dtype=dtype) elif offset is None: raise ValueError( "Offset must not be None as the full dataset has not been provided." @@ -275,7 +288,7 @@ class VtkHdfFile(AbstractContextManager): f" {offset} + {data.shape} = {offset + data.shape} > {shape}" ) - dataset = self.file_handler.create_dataset(path, shape, data.dtype) + dataset = self.file_handler.create_dataset(path, shape=shape, dtype=dtype) start = offset stop = offset + data.shape @@ -302,7 +315,7 @@ class VtkHdfFile(AbstractContextManager): be omitted if data provides the full dataset. """ dataset_path = self._build_dataset_path("PointData", name) - self._write_dataset(dataset_path, data, shape, offset) + self._write_dataset(dataset_path, data, shape=shape, offset=offset) def add_cell_data( self, @@ -322,14 +335,15 @@ class VtkHdfFile(AbstractContextManager): be omitted if data provides the full dataset. """ dataset_path = self._build_dataset_path("CellData", name) - self._write_dataset(dataset_path, data, shape, offset) + self._write_dataset(dataset_path, data, shape=shape, offset=offset) def add_field_data( self, name: str, - data: npt.NDArray, + data: npt.ArrayLike, shape: Optional[npt.NDArray[np.intc]] = None, offset: Optional[npt.NDArray[np.intc]] = None, + dtype: Optional[npt.DTypeLike] = None, ): """Add field data to the VTKHDF file. 
@@ -342,7 +356,9 @@ class VtkHdfFile(AbstractContextManager): be omitted if data provides the full dataset. """ dataset_path = self._build_dataset_path("FieldData", name) - self._write_dataset(dataset_path, data, shape, offset) + self._write_dataset( + dataset_path, data, shape=shape, offset=offset, dtype=dtype, xyz_data_ordering=False + ) class VtkCellType(np.uint8, Enum): From 67d6d6307246242ee54a3feb0192c6f6d1e36d88 Mon Sep 17 00:00:00 2001 From: nmannall Date: Mon, 11 Nov 2024 17:01:59 +0000 Subject: [PATCH 22/31] Add gprmax metadata to geometry objects --- gprMax/geometry_outputs.py | 158 ++++++++++++++++++++++--------------- gprMax/materials.py | 34 ++++++-- 2 files changed, 123 insertions(+), 69 deletions(-) diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index c08e3c47..13a853bf 100644 --- a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -21,17 +21,20 @@ import logging import sys from abc import ABC, abstractmethod from pathlib import Path -from typing import List +from typing import Dict, List, Sequence, Tuple, Union import h5py import numpy as np +import numpy.typing as npt from tqdm import tqdm import gprMax.config as config from gprMax.grid.fdtd_grid import FDTDGrid +from gprMax.receivers import Rx +from gprMax.sources import Source from gprMax.vtkhdf_filehandlers.vtk_image_data import VtkImageData from gprMax.vtkhdf_filehandlers.vtk_unstructured_grid import VtkUnstructuredGrid -from gprMax.vtkhdf_filehandlers.vtkhdf import VtkCellType +from gprMax.vtkhdf_filehandlers.vtkhdf import VtkCellType, VtkHdfFile from ._version import __version__ from .cython.geometry_outputs import get_line_properties @@ -228,13 +231,7 @@ class GeometryViewLines(GeometryView): assert isinstance(self.material_data, np.ndarray) # Write information about any PMLs, sources, receivers - comments = Comments(self.grid, self) - comments.averaged_materials = True - comments.materials_only = True - info = comments.get_gprmax_info() - comments = json.dumps(info) - - self.materials = comments + self.metadata = Metadata(self.grid, self, averaged_materials=True, materials_only=True) # Number of bytes of data to be written to file self.nbytes = ( @@ -257,6 +254,7 @@ class GeometryViewLines(GeometryView): self.cell_offsets, ) as f: f.add_cell_data("Material", self.material_data) + self.metadata.write_to_vtkhdf(f) class GeometryViewVoxels(GeometryView): @@ -298,50 +296,48 @@ class GeometryViewVoxels(GeometryView): self.spacing = self.step * self.grid.dl # Write information about any PMLs, sources, receivers - comments = Comments(self.grid, self) - info = comments.get_gprmax_info() - comments = json.dumps(info) + self.metadata = Metadata(self.grid, self) self.nbytes = self.material_data.nbytes - self.materials = comments - def write_vtk(self): """Writes geometry information to a VTKHDF file.""" with VtkImageData(self.filename, self.size, self.origin, self.spacing) as f: f.add_cell_data("Material", self.material_data) + self.metadata.write_to_vtkhdf(f) -class Comments: +class Metadata: """Comments can be strings included in the header of XML VTK file, and are used to hold extra (gprMax) information about the VTK data. 
""" - def __init__(self, grid, gv): + def __init__( + self, + grid: FDTDGrid, + gv: GeometryView, + averaged_materials: bool = False, + materials_only: bool = False, + ): self.grid = grid self.gv = gv - self.averaged_materials = False - self.materials_only = False + self.averaged_materials = averaged_materials + self.materials_only = materials_only - def get_gprmax_info(self): - """Returns gprMax specific information relating material, source, - and receiver names to numeric identifiers. - """ + self.gprmax_version = __version__ + self.dx_dy_dz = self.grid.dl + self.nx_ny_nz = np.array([self.grid.nx, self.grid.ny, self.grid.nz], dtype=np.intc) - # Comments for Paraview macro - comments = { - "gprMax_version": __version__, - "dx_dy_dz": self.dx_dy_dz_comment(), - "nx_ny_nz": self.nx_ny_nz_comment(), - "Materials": self.materials_comment(), - } # Write the name and numeric ID for each material + self.materials = self.materials_comment() # Write information on PMLs, sources, and receivers if not self.materials_only: # Information on PML thickness if self.grid.pmls["slabs"]: - comments["PMLthickness"] = self.pml_gv_comment() + self.pml_thickness = self.pml_gv_comment() + else: + self.pml_thickness = None srcs = ( self.grid.hertziandipoles + self.grid.magneticdipoles @@ -349,53 +345,89 @@ class Comments: + self.grid.transmissionlines ) if srcs: - comments["Sources"] = self.srcs_rx_gv_comment(srcs) + self.source_ids, self.source_positions = self.srcs_rx_gv_comment(srcs) + else: + self.source_ids = None + self.source_positions = None if self.grid.rxs: - comments["Receivers"] = self.srcs_rx_gv_comment(self.grid.rxs) + self.receiver_ids, self.receiver_positions = self.srcs_rx_gv_comment(self.grid.rxs) + else: + self.receiver_ids = None + self.receiver_positions = None - return comments + def write_to_vtkhdf(self, file_handler: VtkHdfFile): + file_handler.add_field_data( + "gprMax_version", + self.gprmax_version, + dtype=h5py.string_dtype(), + ) + file_handler.add_field_data("dx_dy_dz", self.dx_dy_dz) + file_handler.add_field_data("nx_ny_nz", self.nx_ny_nz) - def pml_gv_comment(self): + file_handler.add_field_data( + "material_ids", + self.materials, + dtype=h5py.string_dtype(), + ) + + if not self.materials_only: + if self.pml_thickness is not None: + file_handler.add_field_data("pml_thickness", self.pml_thickness) + + if self.source_ids is not None and self.source_positions is not None: + file_handler.add_field_data( + "source_ids", self.source_ids, dtype=h5py.string_dtype() + ) + file_handler.add_field_data("sources", self.source_positions) + + if self.receiver_ids is not None and self.receiver_positions is not None: + file_handler.add_field_data( + "receiver_ids", self.receiver_ids, dtype=h5py.string_dtype() + ) + file_handler.add_field_data("receivers", self.receiver_positions) + + def pml_gv_comment(self) -> List[int]: grid = self.grid # Only render PMLs if they are in the geometry view - pmlstorender = dict.fromkeys(grid.pmls["thickness"], 0) + thickness: Dict[str, int] = grid.pmls["thickness"] + gv_pml_depth = dict.fromkeys(thickness, 0) - # Casting to int required as json does not handle numpy types - if grid.pmls["thickness"]["x0"] - self.gv.xs > 0: - pmlstorender["x0"] = int(grid.pmls["thickness"]["x0"] - self.gv.xs) - if grid.pmls["thickness"]["y0"] - self.gv.ys > 0: - pmlstorender["y0"] = int(grid.pmls["thickness"]["y0"] - self.gv.ys) - if grid.pmls["thickness"]["z0"] - self.gv.zs > 0: - pmlstorender["z0"] = int(grid.pmls["thickness"]["z0"] - self.gv.zs) - if self.gv.xf > grid.nx - 
grid.pmls["thickness"]["xmax"]: - pmlstorender["xmax"] = int(self.gv.xf - (grid.nx - grid.pmls["thickness"]["xmax"])) - if self.gv.yf > grid.ny - grid.pmls["thickness"]["ymax"]: - pmlstorender["ymax"] = int(self.gv.yf - (grid.ny - grid.pmls["thickness"]["ymax"])) - if self.gv.zf > grid.nz - grid.pmls["thickness"]["zmax"]: - pmlstorender["zmax"] = int(self.gv.zf - (grid.nz - grid.pmls["thickness"]["zmax"])) + if self.gv.xs < thickness["x0"]: + gv_pml_depth["x0"] = thickness["x0"] - self.gv.xs + if self.gv.ys < thickness["y0"]: + gv_pml_depth["y0"] = thickness["y0"] - self.gv.ys + if thickness["z0"] - self.gv.zs > 0: + gv_pml_depth["z0"] = thickness["z0"] - self.gv.zs + if self.gv.xf > grid.nx - thickness["xmax"]: + gv_pml_depth["xmax"] = self.gv.xf - (grid.nx - thickness["xmax"]) + if self.gv.yf > grid.ny - thickness["ymax"]: + gv_pml_depth["ymax"] = self.gv.yf - (grid.ny - thickness["ymax"]) + if self.gv.zf > grid.nz - thickness["zmax"]: + gv_pml_depth["zmax"] = self.gv.zf - (grid.nz - thickness["zmax"]) - return list(pmlstorender.values()) + return list(gv_pml_depth.values()) - def srcs_rx_gv_comment(self, srcs): + def srcs_rx_gv_comment( + self, srcs: Union[Sequence[Source], List[Rx]] + ) -> Tuple[List[str], npt.NDArray[np.single]]: """Used to name sources and/or receivers.""" - sc = [] - for src in srcs: - p = (src.xcoord * self.grid.dx, src.ycoord * self.grid.dy, src.zcoord * self.grid.dz) - p = list(map(float, p)) + names: List[str] = [] + positions: npt.NDArray[np.single] = np.empty((len(srcs), 3)) + for index, src in enumerate(srcs): + position = src.coord * self.grid.dl + names.append(src.ID) + positions[index] = position - s = {"name": src.ID, "position": p} - sc.append(s) + return names, positions - return sc + def dx_dy_dz_comment(self) -> npt.NDArray[np.single]: + return self.grid.dl - def dx_dy_dz_comment(self): - return list(map(float, [self.grid.dx, self.grid.dy, self.grid.dz])) + def nx_ny_nz_comment(self) -> npt.NDArray[np.intc]: + return np.array([self.grid.nx, self.grid.ny, self.grid.nz], dtype=np.intc) - def nx_ny_nz_comment(self): - return list(map(int, [self.grid.nx, self.grid.ny, self.grid.nz])) - - def materials_comment(self): + def materials_comment(self) -> List[str]: if not self.averaged_materials: return [m.ID for m in self.grid.materials if m.type != "dielectric-smoothed"] else: diff --git a/gprMax/materials.py b/gprMax/materials.py index 5de9c7c6..34d95b59 100644 --- a/gprMax/materials.py +++ b/gprMax/materials.py @@ -30,7 +30,7 @@ class Material: their properties and update coefficients. """ - def __init__(self, numID, ID): + def __init__(self, numID: int, ID: str): """ Args: numID: int for numeric I of the material. @@ -239,7 +239,9 @@ class PeplinskiSoil: by Peplinski (http://dx.doi.org/10.1109/36.387598). """ - def __init__(self, ID, sandfraction, clayfraction, bulkdensity, sandpartdensity, watervolfraction): + def __init__( + self, ID, sandfraction, clayfraction, bulkdensity, sandpartdensity, watervolfraction + ): """ Args: ID: string for name of the soil. 
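Taken together with the geometry_outputs.py changes above, the JSON comment blob is replaced by typed field-data arrays, so downstream tools can read the metadata back with plain h5py. A reader-side sketch (hypothetical filename; dataset paths assume the standard VTKHDF root group used by these file handlers):

    import h5py

    with h5py.File("geometry_view.vtkhdf", "r") as f:
        fd = f["VTKHDF/FieldData"]
        print("gprMax version:", fd["gprMax_version"].asstr()[0])
        materials = list(fd["material_ids"].asstr()[()])
        if "sources" in fd:
            # source_ids and sources are index-aligned; positions in metres
            for name, pos in zip(fd["source_ids"].asstr()[()], fd["sources"][()]):
                print(name, pos)  # pos has shape (3,)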
@@ -304,7 +306,12 @@ class PeplinskiSoil: muiter = np.nditer(mumaterials, flags=["c_index"]) while not muiter.finished: # Real part for frequencies in the range 1.4GHz to 18GHz - er = (1 + (self.rb / self.rs) * ((es**a) - 1) + (muiter[0] ** b1 * erealw**a) - muiter[0]) ** (1 / a) + er = ( + 1 + + (self.rb / self.rs) * ((es**a) - 1) + + (muiter[0] ** b1 * erealw**a) + - muiter[0] + ) ** (1 / a) # Real part for frequencies in the range 0.3GHz to 1.3GHz (linear # correction to 1.4-18GHz value) er = 1.15 * er - 0.68 @@ -499,7 +506,10 @@ def calculate_water_properties(T=25, S=0): delta = 25 - T beta = ( - 2.033e-2 + 1.266e-4 * delta + 2.464e-6 * delta**2 - S * (1.849e-5 - 2.551e-7 * delta + 2.551e-8 * delta**2) + 2.033e-2 + + 1.266e-4 * delta + + 2.464e-6 * delta**2 + - S * (1.849e-5 - 2.551e-7 * delta + 2.551e-8 * delta**2) ) sig_25s = S * (0.182521 - 1.46192e-3 * S + 2.09324e-5 * S**2 - 1.28205e-7 * S**3) sig = sig_25s * np.exp(-delta * beta) @@ -608,8 +618,20 @@ def process_materials(G): material.calculate_update_coeffsH(G) # Add update coefficients to overall storage for all materials - G.updatecoeffsE[material.numID, :] = material.CA, material.CBx, material.CBy, material.CBz, material.srce - G.updatecoeffsH[material.numID, :] = material.DA, material.DBx, material.DBy, material.DBz, material.srcm + G.updatecoeffsE[material.numID, :] = ( + material.CA, + material.CBx, + material.CBy, + material.CBz, + material.srce, + ) + G.updatecoeffsH[material.numID, :] = ( + material.DA, + material.DBx, + material.DBy, + material.DBz, + material.srcm, + ) # Add update coefficients to overall storage for dispersive materials if hasattr(material, "poles"): From 98b77504e34a90992f0b177eca22556f9808db1a Mon Sep 17 00:00:00 2001 From: nmannall Date: Tue, 12 Nov 2024 14:34:10 +0000 Subject: [PATCH 23/31] Update Paraview macro to use field data --- toolboxes/Utilities/Paraview/gprMax.py | 282 +++++++++++++++---------- 1 file changed, 175 insertions(+), 107 deletions(-) diff --git a/toolboxes/Utilities/Paraview/gprMax.py b/toolboxes/Utilities/Paraview/gprMax.py index 3c75aee1..3adaf67a 100644 --- a/toolboxes/Utilities/Paraview/gprMax.py +++ b/toolboxes/Utilities/Paraview/gprMax.py @@ -16,69 +16,103 @@ # You should have received a copy of the GNU General Public License # along with gprMax. If not, see . -import json -import os - +from paraview.servermanager import Proxy, SourceProxy from paraview.simple import ( AppendDatasets, Box, + ColorBy, + FetchData, GetActiveSource, GetActiveView, + GetDisplayProperties, GetParaViewVersion, Hide, - OpenDataFile, RenameSource, RenderAllViews, SetActiveSource, Show, Threshold, ) +from paraview.vtk.numpy_interface import dataset_adapter as dsa +from vtkmodules.vtkCommonCore import vtkStringArray + +COLOUR_SCALARS = ("CELLS", "Material") -def threshold_filt(input, lt, ut, scalars): +def threshold_materials(source: SourceProxy, view: Proxy, material_ids: vtkStringArray): """Create threshold filter according to Paraview version. Args: - input (array): input data to threshold filter - lt, ut (int): lower and upper bounds of thresholding operation - scalars (list/str): name of scalar array to perform thresholding - - Returns: - threshold (object): threshold filter + source: Input to the threshold filter. + view: The view proxy to show the threshold in. + material_ids: Array of material ids. A new threshold filter will + be created for each material. 
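+
+    A usage sketch (hypothetical material names; assumes an active
+    pipeline in Paraview's Python shell):
+
+        >>> ids = vtkStringArray()
+        >>> for name in ("pec", "free_space"):
+        ...     ids.InsertNextValue(name)
+        >>> threshold_materials(GetActiveSource(), GetActiveView(), ids)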
""" # Read Paraview version number to set threshold filter method pvv = GetParaViewVersion() - threshold = Threshold(Input=input) - threshold.Scalars = scalars + for index in range(material_ids.GetNumberOfValues()): + threshold = Threshold(Input=source, Scalars=COLOUR_SCALARS) - if pvv.major == 5 and pvv.minor < 10: - threshold.ThresholdRange = [lt, ut] - else: - threshold.LowerThreshold = lt - threshold.UpperThreshold = ut + if pvv.major == 5 and pvv.minor < 10: + threshold.ThresholdRange = [index, index] + else: + threshold.LowerThreshold = index + threshold.UpperThreshold = index - return threshold + RenameSource(material_ids.GetValue(index), threshold) + + # Show data in view, except for free_space + if index != 1: + Show(threshold, view) + + threshold.UpdatePipeline() -def display_pmls(pmlthick, dx_dy_dz, nx_ny_nz): - """Display PMLs as box sources using PML thickness values. - Only suitable for gprMax >= v4 +def create_box_sources(names: vtkStringArray, positions: dsa.VTKArray, dl: dsa.VTKArray): + """Create new single cell box sources. Args: - pmlthick (tuple): PML thickness values for each slab (cells) - dx_dy_dz (tuple): Spatial resolution (m) - nx_ny_dz (tuple): Domain size (cells) + names: Array of N names for the new sources. + positions: x, y, z coordinates of the new sources. This should + have shape (N, 3). + dl: x, y, z spatial resolution. + """ + for index in range(names.GetNumberOfValues()): + name = names.GetValue(index) + pos = positions[index] + src = Box( + Center=pos + dl / 2, + XLength=dl[0], + YLength=dl[1], + ZLength=dl[2], + ) + RenameSource(name, src) + Show(src) + + +def display_pmls(pmlthick: dsa.VTKArray, dx_dy_dz: dsa.VTKArray, nx_ny_nz: dsa.VTKArray): + """Display PMLs as box sources using PML thickness values. + + Only suitable for gprMax >= v4. + + Args: + pmlthick: PML thickness values for each slab (cells). + dx_dy_dz: Spatial resolution (m). + nx_ny_dz: Domain size (cells). 
""" pml_names = ["x0", "y0", "z0", "xmax", "ymax", "zmax"] pmls = dict.fromkeys(pml_names, None) - SetActiveSource(pv_data) if pmlthick[0] != 0: x0 = Box( - Center=[pmlthick[0] * dx_dy_dz[0] / 2, nx_ny_nz[1] * dx_dy_dz[1] / 2, nx_ny_nz[2] * dx_dy_dz[2] / 2], + Center=[ + pmlthick[0] * dx_dy_dz[0] / 2, + nx_ny_nz[1] * dx_dy_dz[1] / 2, + nx_ny_nz[2] * dx_dy_dz[2] / 2, + ], XLength=pmlthick[0] * dx_dy_dz[0], YLength=nx_ny_nz[1] * dx_dy_dz[1], ZLength=nx_ny_nz[2] * dx_dy_dz[2], @@ -100,7 +134,11 @@ def display_pmls(pmlthick, dx_dy_dz, nx_ny_nz): if pmlthick[1] != 0: y0 = Box( - Center=[nx_ny_nz[0] * dx_dy_dz[0] / 2, pmlthick[1] * dx_dy_dz[1] / 2, nx_ny_nz[2] * dx_dy_dz[2] / 2], + Center=[ + nx_ny_nz[0] * dx_dy_dz[0] / 2, + pmlthick[1] * dx_dy_dz[1] / 2, + nx_ny_nz[2] * dx_dy_dz[2] / 2, + ], XLength=nx_ny_nz[0] * dx_dy_dz[0], YLength=pmlthick[1] * dx_dy_dz[1], ZLength=nx_ny_nz[2] * dx_dy_dz[2], @@ -122,7 +160,11 @@ def display_pmls(pmlthick, dx_dy_dz, nx_ny_nz): if pmlthick[2] != 0: z0 = Box( - Center=[nx_ny_nz[0] * dx_dy_dz[0] / 2, nx_ny_nz[1] * dx_dy_dz[1] / 2, pmlthick[2] * dx_dy_dz[2] / 2], + Center=[ + nx_ny_nz[0] * dx_dy_dz[0] / 2, + nx_ny_nz[1] * dx_dy_dz[1] / 2, + pmlthick[2] * dx_dy_dz[2] / 2, + ], XLength=nx_ny_nz[0] * dx_dy_dz[0], YLength=nx_ny_nz[1] * dx_dy_dz[1], ZLength=pmlthick[2] * dx_dy_dz[2], @@ -158,96 +200,122 @@ def display_pmls(pmlthick, dx_dy_dz, nx_ny_nz): pml_view.Opacity = 0.5 -# Get whatever source is loaded - should be loaded file (.vt*) or files (.pvd) -pv_data = GetActiveSource() +def is_valid_key(key: str, dictionary: dict) -> bool: + """Check key exists and is not an empty array (i.e. VTKNoneArray). -# Hide display of root data -Hide(pv_data) + Args: + key: Name of key to check. + dictionary: Dictionary like object that should contain the key. -# Single .vti or .vtu file -file = pv_data.FileName[0] + Returns: + is_valid_key: True if the key is in the dictionary and is not + of type VTKNoneArray. False otherwise. + """ + return key in dictionary.keys() and not isinstance(dictionary[key], dsa.VTKNoneArray) -# Read and display data from file, i.e. 
materials, sources, receivers, and PMLs -with open(file, "rb") as f: - # Comments () embedded in line 3 of file - f.readline() - f.readline() - c = f.readline().decode() - # Strip comment tags - c = c[5:-5] - # Model information - c = json.loads(c) - print("\ngprMax version: " + c["gprMax_version"]) - print(file) -################ -# Display data # -################ -pv_view = GetActiveView() -pv_view.AxesGrid.Visibility = 1 # Show Data Axes Grid -pv_disp = Show(pv_data, pv_view) -pv_disp.ColorArrayName = ["CELLS", "Material"] +class HaltException(Exception): + pass -# Discretisation -dl = c["dx_dy_dz"] -# Number of voxels -nl = c["nx_ny_nz"] -# Materials try: - for i, mat in enumerate(c["Materials"]): - threshold = threshold_filt(pv_data, i, i, ["CELLS", "Material"]) - RenameSource(mat, threshold) + print("=============== Running gprMax macro ===============", end="\n\n") - # Show data in view, except for free_space - if i != 1: - thresholddisplay = Show(threshold, pv_view) - thresholddisplay.ColorArrayName = ["CELLS", "Material"] - threshold.UpdatePipeline() -except KeyError: - print("No materials to load") + # Get active source - should be loaded file (.vtkhdf) + pv_source = GetActiveSource() -# Display any sources -try: - for item in c["Sources"]: - pos = item["position"] - name = item["name"] - src = Box( - Center=[pos[0] + dl[0] / 2, pos[1] + dl[1] / 2, pos[2] + dl[2] / 2], - XLength=dl[0], - YLength=dl[1], - ZLength=dl[2], + if pv_source is None: + raise HaltException("ERROR: No active source.") + + pv_source.UpdatePipeline() + + raw_data = FetchData(pv_source)[0] + data = dsa.WrapDataObject(raw_data) + + # Check the necessary metadata is attached to the file + metadata_keys = data.FieldData.keys() + compulsory_keys = ["gprMax_version", "nx_ny_nz", "dx_dy_dz", "material_ids"] + missing_keys = [] + for key in compulsory_keys: + if key not in metadata_keys: + missing_keys.append(key) + + if len(missing_keys) > 0: + keys = "\n- ".join(missing_keys) + print(f"Missing metadata fields: \n- {keys}\n") + if len(metadata_keys) > 0: + keys = "\n- ".join(metadata_keys) + print(f"Found: \n- {keys}\n") + raise HaltException( + "ERROR: Missing gprMax metadata information. Do you have the correct source selected?" ) - RenameSource(name, src) - Show(src) -except KeyError: - print("No sources to load") -# Display any receivers -try: - for item in c["Receivers"]: - pos = item["position"] - name = item["name"] - rx = Box( - Center=[pos[0] + dl[0] / 2, pos[1] + dl[1] / 2, pos[2] + dl[2] / 2], - XLength=dl[0], - YLength=dl[1], - ZLength=dl[2], - ) - RenameSource(name, rx) - Show(rx) -except KeyError: - print("No receivers to load") + # gprMax version + version = data.FieldData["gprMax_version"].GetValue(0) + # Number of voxels + nl = data.FieldData["nx_ny_nz"] + # Discretisation + dl = data.FieldData["dx_dy_dz"] -# Display any PMLs -try: - pt = c["PMLthickness"] - display_pmls(pt, dl, nl) -except KeyError: - print("No PMLs to load") + print("gprMax version:", data.FieldData["gprMax_version"].GetValue(0)) + print("nx_ny_nz:", nl) + print("dx_dy_dz:", dl, end="\n\n") + ################ + # Display data # + ################ -RenderAllViews() + # Get active view + pv_view = GetActiveView() + pv_view.AxesGrid.Visibility = 1 # Show Data Axes Grid -# Reset view to fit data -pv_view.ResetCamera() + # Get display properties + pv_disp = GetDisplayProperties(pv_source, view=pv_view) + + # Set scalar colouring + ColorBy(pv_disp, COLOUR_SCALARS) + + # Materials + print("Loading materials... 
", end="\t") + if is_valid_key("material_ids", data.FieldData): + threshold_materials(pv_source, pv_view, data.FieldData["material_ids"]) + print("Done.") + else: + print("No materials to load.") + + # Display any sources + print("Loading sources... ", end="\t\t") + if is_valid_key("source_ids", data.FieldData) and is_valid_key("sources", data.FieldData): + create_box_sources(data.FieldData["source_ids"], data.FieldData["sources"], dl) + print("Done.") + else: + print("No sources to load.") + + # Display any receivers + print("Loading receivers... ", end="\t") + if is_valid_key("receiver_ids", data.FieldData) and is_valid_key("receivers", data.FieldData): + create_box_sources(data.FieldData["receiver_ids"], data.FieldData["receivers"], dl) + print("Done.") + else: + print("No receivers to load.") + + # Display any PMLs + print("Loading PMLs... ", end="\t\t") + if is_valid_key("pml_thickness", data.FieldData): + display_pmls(data.FieldData["pml_thickness"], dl, nl) + print("Done.") + else: + print("No PMLs to load.") + + RenderAllViews() + + SetActiveSource(pv_source) + Hide(pv_source) + + # Reset view to fit data + pv_view.ResetCamera() + +except HaltException as e: + print(e) + +print() From 439c4cf4875e84c8ab01da82b0f5d44e464adb07 Mon Sep 17 00:00:00 2001 From: nmannall Date: Tue, 12 Nov 2024 17:23:27 +0000 Subject: [PATCH 24/31] Correct GeometryView size when step is non zero --- gprMax/geometry_outputs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index 13a853bf..4c54ec44 100644 --- a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -101,7 +101,7 @@ class GeometryView(ABC): self.start = np.array([xs, ys, zs], dtype=np.intc) self.stop = np.array([xf, yf, zf], dtype=np.intc) self.step = np.array([dx, dy, dz], dtype=np.intc) - self.size = self.stop - self.start + self.size = (self.stop - self.start) // self.step self.filename = Path(filename) self.filenamebase = filename From 11008f9e97f4e9cf8e363ddbd03c93b089b622d9 Mon Sep 17 00:00:00 2001 From: nmannall Date: Tue, 26 Nov 2024 16:01:19 +0000 Subject: [PATCH 25/31] Replace np.single with np.float32 for consistancy --- gprMax/geometry_outputs.py | 6 ++--- gprMax/grid/fdtd_grid.py | 20 ++++++++-------- gprMax/snapshots.py | 4 ++-- gprMax/vtkhdf_filehandlers/vtk_image_data.py | 24 ++++++++++---------- 4 files changed, 27 insertions(+), 27 deletions(-) diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py index 4c54ec44..7dd41b8e 100644 --- a/gprMax/geometry_outputs.py +++ b/gprMax/geometry_outputs.py @@ -410,10 +410,10 @@ class Metadata: def srcs_rx_gv_comment( self, srcs: Union[Sequence[Source], List[Rx]] - ) -> Tuple[List[str], npt.NDArray[np.single]]: + ) -> Tuple[List[str], npt.NDArray[np.float32]]: """Used to name sources and/or receivers.""" names: List[str] = [] - positions: npt.NDArray[np.single] = np.empty((len(srcs), 3)) + positions: npt.NDArray[np.float32] = np.empty((len(srcs), 3)) for index, src in enumerate(srcs): position = src.coord * self.grid.dl names.append(src.ID) @@ -421,7 +421,7 @@ class Metadata: return names, positions - def dx_dy_dz_comment(self) -> npt.NDArray[np.single]: + def dx_dy_dz_comment(self) -> npt.NDArray[np.float32]: return self.grid.dl def nx_ny_nz_comment(self) -> npt.NDArray[np.intc]: diff --git a/gprMax/grid/fdtd_grid.py b/gprMax/grid/fdtd_grid.py index 021e54a0..53d61d88 100644 --- a/gprMax/grid/fdtd_grid.py +++ b/gprMax/grid/fdtd_grid.py @@ -58,21 +58,21 @@ class FDTDGrid: self.nx = 0 self.ny 
= 0 self.nz = 0 - self.dl = np.ones(3, dtype=np.single) + self.dl = np.ones(3, dtype=np.float32) self.dt = 0.0 # Field Arrays - self.Ex: npt.NDArray[np.single] - self.Ey: npt.NDArray[np.single] - self.Ez: npt.NDArray[np.single] - self.Hx: npt.NDArray[np.single] - self.Hy: npt.NDArray[np.single] - self.Hz: npt.NDArray[np.single] + self.Ex: npt.NDArray[np.float32] + self.Ey: npt.NDArray[np.float32] + self.Ez: npt.NDArray[np.float32] + self.Hx: npt.NDArray[np.float32] + self.Hy: npt.NDArray[np.float32] + self.Hz: npt.NDArray[np.float32] # Dispersive Arrays - self.Tx: npt.NDArray[np.single] - self.Ty: npt.NDArray[np.single] - self.Tz: npt.NDArray[np.single] + self.Tx: npt.NDArray[np.float32] + self.Ty: npt.NDArray[np.float32] + self.Tz: npt.NDArray[np.float32] # Geometry Arrays self.solid: npt.NDArray[np.uint32] diff --git a/gprMax/snapshots.py b/gprMax/snapshots.py index 210bd36d..ce1fed87 100644 --- a/gprMax/snapshots.py +++ b/gprMax/snapshots.py @@ -110,7 +110,7 @@ class Snapshot: filename: str, fileext: str, outputs: Dict[str, bool], - grid_dl: npt.NDArray[np.single], + grid_dl: npt.NDArray[np.float32], grid_dt: float, ): """ @@ -358,7 +358,7 @@ class MPISnapshot(Snapshot): filename: str, fileext: str, outputs: Dict[str, bool], - grid_dl: npt.NDArray[np.single], + grid_dl: npt.NDArray[np.float32], grid_dt: float, ): super().__init__( diff --git a/gprMax/vtkhdf_filehandlers/vtk_image_data.py b/gprMax/vtkhdf_filehandlers/vtk_image_data.py index e521d201..9cc44622 100644 --- a/gprMax/vtkhdf_filehandlers/vtk_image_data.py +++ b/gprMax/vtkhdf_filehandlers/vtk_image_data.py @@ -30,9 +30,9 @@ class VtkImageData(VtkHdfFile): self, filename: Union[str, PathLike], shape: npt.NDArray[np.intc], - origin: Optional[npt.NDArray[np.single]] = None, - spacing: Optional[npt.NDArray[np.single]] = None, - direction: Optional[npt.NDArray[np.single]] = None, + origin: Optional[npt.NDArray[np.float32]] = None, + spacing: Optional[npt.NDArray[np.float32]] = None, + direction: Optional[npt.NDArray[np.float32]] = None, comm: Optional[MPI.Comm] = None, ): """Create a new VtkImageData file. @@ -77,15 +77,15 @@ class VtkImageData(VtkHdfFile): self._set_root_attribute(self.WHOLE_EXTENT_ATTR, whole_extent) if origin is None: - origin = np.zeros(self.DIMENSIONS, dtype=np.single) + origin = np.zeros(self.DIMENSIONS, dtype=np.float32) self.set_origin(origin) if spacing is None: - spacing = np.ones(self.DIMENSIONS, dtype=np.single) + spacing = np.ones(self.DIMENSIONS, dtype=np.float32) self.set_spacing(spacing) if direction is None: - direction = np.diag(np.ones(self.DIMENSIONS, dtype=np.single)) + direction = np.diag(np.ones(self.DIMENSIONS, dtype=np.float32)) self.set_direction(direction) @property @@ -93,18 +93,18 @@ class VtkImageData(VtkHdfFile): return self._get_root_attribute(self.WHOLE_EXTENT_ATTR) @property - def origin(self) -> npt.NDArray[np.single]: + def origin(self) -> npt.NDArray[np.float32]: return self._get_root_attribute(self.ORIGIN_ATTR) @property - def spacing(self) -> npt.NDArray[np.single]: + def spacing(self) -> npt.NDArray[np.float32]: return self._get_root_attribute(self.SPACING_ATTR) @property - def direction(self) -> npt.NDArray[np.single]: + def direction(self) -> npt.NDArray[np.float32]: return self._get_root_attribute(self.DIRECTION_ATTR) - def set_origin(self, origin: npt.NDArray[np.single]): + def set_origin(self, origin: npt.NDArray[np.float32]): """Set the origin coordinate of the image data. 
Args: @@ -114,7 +114,7 @@ class VtkImageData(VtkHdfFile): raise ValueError(f"Origin attribute must have {self.DIMENSIONS} dimensions.") self._set_root_attribute(self.ORIGIN_ATTR, origin) - def set_spacing(self, spacing: npt.NDArray[np.single]): + def set_spacing(self, spacing: npt.NDArray[np.float32]): """Set the discritisation of the image data. Args: @@ -124,7 +124,7 @@ class VtkImageData(VtkHdfFile): raise ValueError(f"Spacing attribute must have {self.DIMENSIONS} dimensions.") self._set_root_attribute(self.SPACING_ATTR, spacing) - def set_direction(self, direction: npt.NDArray[np.single]): + def set_direction(self, direction: npt.NDArray[np.float32]): """Set the coordinate system of the image data. Args: From 32392aba42cbb8b16d1f4742b4a635b5517c1150 Mon Sep 17 00:00:00 2001 From: nmannall Date: Tue, 26 Nov 2024 16:25:59 +0000 Subject: [PATCH 26/31] Change FDTDGrid.dl type to np.float64 --- gprMax/cmds_singleuse.py | 2 +- gprMax/grid/fdtd_grid.py | 8 ++++---- gprMax/model.py | 10 +++++++--- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/gprMax/cmds_singleuse.py b/gprMax/cmds_singleuse.py index 7e47f4d3..e39bdf29 100644 --- a/gprMax/cmds_singleuse.py +++ b/gprMax/cmds_singleuse.py @@ -92,7 +92,7 @@ class Discretisation(UserObjectSingle): def build(self, model, uip): try: - model.dl = np.array(self.kwargs["p1"], dtype=float) + model.dl = np.array(self.kwargs["p1"], dtype=np.float64) except KeyError: logger.exception(f"{self.__str__()} discretisation requires a point") raise diff --git a/gprMax/grid/fdtd_grid.py b/gprMax/grid/fdtd_grid.py index 53d61d88..60004e7f 100644 --- a/gprMax/grid/fdtd_grid.py +++ b/gprMax/grid/fdtd_grid.py @@ -58,7 +58,7 @@ class FDTDGrid: self.nx = 0 self.ny = 0 self.nz = 0 - self.dl = np.ones(3, dtype=np.float32) + self.dl = np.ones(3, dtype=np.float64) self.dt = 0.0 # Field Arrays @@ -81,9 +81,9 @@ class FDTDGrid: self.ID: npt.NDArray[np.uint32] # Update Coefficient Arrays - self.updatecoeffsE: npt.NDArray - self.updatecoeffsH: npt.NDArray - self.updatecoeffsdispersive: npt.NDArray + self.updatecoeffsE: npt.NDArray[np.float32] + self.updatecoeffsH: npt.NDArray[np.float32] + self.updatecoeffsdispersive: npt.NDArray[np.float32] # PML parameters - set some defaults to use if not user provided self.pmls = {} diff --git a/gprMax/model.py b/gprMax/model.py index 87fa3f22..82add10c 100644 --- a/gprMax/model.py +++ b/gprMax/model.py @@ -23,6 +23,7 @@ from typing import List, Sequence import humanize import numpy as np +import numpy.typing as npt import psutil from colorama import Fore, Style, init @@ -125,11 +126,11 @@ class Model: self.G.dl[2] = value @property - def dl(self) -> np.ndarray: + def dl(self) -> npt.NDArray[np.float64]: return self.G.dl @dl.setter - def dl(self, value: np.ndarray): + def dl(self, value: npt.NDArray[np.float64]): self.G.dl = value @property @@ -164,7 +165,10 @@ class Model: self.p = psutil.Process() # Normal model reading/building process; bypassed if geometry information to be reused - self.reuse_geometry() if config.get_model_config().reuse_geometry() else self.build_geometry() + if config.get_model_config().reuse_geometry(): + self.reuse_geometry() + else: + self.build_geometry() logger.info( f"Output directory: {config.get_model_config().output_file_path.parent.resolve()}\n" From e8205cd85ff0b821cacbc9e2b424bdc52dfd2890 Mon Sep 17 00:00:00 2001 From: nmannall Date: Wed, 4 Dec 2024 12:06:33 +0000 Subject: [PATCH 27/31] Add geometry view reframe tests --- .gitattributes | 3 ++ .../full_volume.vtkhdf | 3 ++ 
.../partial_volume.vtkhdf | 3 ++ .../full_volume.vtkhdf | 3 ++ .../partial_volume.vtkhdf | Bin 0 -> 268444 bytes reframe_tests/tests/base_tests.py | 5 +- reframe_tests/tests/mixins.py | 45 ++++++++++++++++++ reframe_tests/tests/regression_checks.py | 15 +++++- .../geometry_view_tests/geometry_view_fine.in | 17 +++++++ .../geometry_view_voxel.in | 17 +++++++ reframe_tests/tests/standard_tests.py | 11 ++++- reframe_tests/tests/test_geometry_views.py | 20 ++++++++ 12 files changed, 138 insertions(+), 4 deletions(-) create mode 100644 reframe_tests/regression_checks/TestGeometryView_5176823e/full_volume.vtkhdf create mode 100644 reframe_tests/regression_checks/TestGeometryView_5176823e/partial_volume.vtkhdf create mode 100644 reframe_tests/regression_checks/TestGeometryView_77980202/full_volume.vtkhdf create mode 100644 reframe_tests/regression_checks/TestGeometryView_77980202/partial_volume.vtkhdf create mode 100644 reframe_tests/tests/src/geometry_view_tests/geometry_view_fine.in create mode 100644 reframe_tests/tests/src/geometry_view_tests/geometry_view_voxel.in create mode 100644 reframe_tests/tests/test_geometry_views.py diff --git a/.gitattributes b/.gitattributes index c8fd44ec..73f2c564 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,4 @@ tools/Jupyter_notebooks/* linguist-vendored +reframe_tests/regression_checks/TestGeometryView_5176823e/partial_volume.vtkhdf filter=lfs diff=lfs merge=lfs -text +reframe_tests/regression_checks/TestGeometryView_5176823e/full_volume.vtkhdf filter=lfs diff=lfs merge=lfs -text +reframe_tests/regression_checks/TestGeometryView_77980202/full_volume.vtkhdf filter=lfs diff=lfs merge=lfs -text diff --git a/reframe_tests/regression_checks/TestGeometryView_5176823e/full_volume.vtkhdf b/reframe_tests/regression_checks/TestGeometryView_5176823e/full_volume.vtkhdf new file mode 100644 index 00000000..2e04533f --- /dev/null +++ b/reframe_tests/regression_checks/TestGeometryView_5176823e/full_volume.vtkhdf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a8134fca3e6228211e4a4bc3e504e64f888c55f22e025b482ec501e0051d253 +size 75740296 diff --git a/reframe_tests/regression_checks/TestGeometryView_5176823e/partial_volume.vtkhdf b/reframe_tests/regression_checks/TestGeometryView_5176823e/partial_volume.vtkhdf new file mode 100644 index 00000000..a7986519 --- /dev/null +++ b/reframe_tests/regression_checks/TestGeometryView_5176823e/partial_volume.vtkhdf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ccd95ff4ef73c224c99a8c321ad6f8014c673a1b33bbb741bff415dfbffd0aa +size 4931176 diff --git a/reframe_tests/regression_checks/TestGeometryView_77980202/full_volume.vtkhdf b/reframe_tests/regression_checks/TestGeometryView_77980202/full_volume.vtkhdf new file mode 100644 index 00000000..8472bcc7 --- /dev/null +++ b/reframe_tests/regression_checks/TestGeometryView_77980202/full_volume.vtkhdf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82b8ac914a9fe20b49f2512215782f01305a2e3211aaac51913f88296a0e3ac5 +size 4012444 diff --git a/reframe_tests/regression_checks/TestGeometryView_77980202/partial_volume.vtkhdf b/reframe_tests/regression_checks/TestGeometryView_77980202/partial_volume.vtkhdf new file mode 100644 index 0000000000000000000000000000000000000000..43d5f5b042bc64ebd57fed841f3a558b96954093 GIT binary patch literal 268444 zcmeI3v5y_sb;jS4lC_a&ONs?ZNfQ<+6d*8ED1bW3FljSLWC@mFyQ3*_EiFQk1W5&y zQp80HrE`%cLH+=%3GyeXbZNLsnRIFm7;xYyQQm!TE;$;_oqNvt&Y79J@7s7eZ)VQ< 
zzTbTJym`B`%0FJc{N1lV`NvOg(%13v=CRE)yO;T&FS@UPec`d)p}e2Z|D=0y)NMcR zw!b*He{r-)|F_w^&>era(;wEo^@A%{HrvXqpSNAx=06|bzdFtbxIL%@u3q`}8|i|7 z++B5^(qDFSmc#hrTYt5k=rl?DudDflaeK5m{?hzD)6MUTPwWL>>HKT+d-1})KEDr) zIBaV)f#LkV*j>lR-F+=z)^|>B-n@MM{`I{7-P4nsH+l#1T)&hHD%&@||N7;02hR7+ zi|6+ZS?6lE|K5P3B71PB^XJwuuM3;~9VR7wVf!if!;`!BPH*4ZJhKgapZTl1td5TM ziRar@X?tO_U)iJ08*Fw`k#ODn_s{7 z_8XUm+Y6gJyQ@0dJh{`qe0ulfo%?B}_FvCl{F8_IJb%1-{_nfvIj59;`pVnQ=4Y4o zNAkbp`^BGsI-y@YwePQeemwZbzHUEL*^}EtNnrT-k$;Z(Wj8-Lhu5y(Ke>B){bt_J z_lsBa0p)i8`Ejm$k8d_V>E<@;emt1t%=PK-_V4&y_gS}p{QG<-P=YjGp6=(nEV@9B zHm~fi=;-|ZK>uEwPSp0#Z+{L=o$2S`{(U`tYNX@m&Yjx{A9ufboj>~Y>Fwe4_1TwK zV1HlSpPKLA;m;-SeSJRfv*eW4p5M%XGcY~_=_bf;X22QX3~&ZG1DpZQ0B3+Rz!~5S zoV6K9e^VIyUc+^carKWa$=K(Zdc73m6jT2k`Nr>Q{fwqwGru7&Q3Ji|Veqtrk2vF?}j z(c@O0VIRs->L2=8_e=WdaVyWT59KKJ4}GlrC4Ka`m1o$8a!mCPy|nvN+8*b$Ji~eq zhMLCEQ@UT%^*k?S%5|RJ8m;VK+G@1cRLkf5e9q4qD9^x}-m7Hcvlq?)XMi)n8Q=^Y z-VChqcaIWN>G+&;TVkjkpM$Z+P&*&VSo4;ik6@H~OY1xtrJkBP^40&_?5RAx@|*HN z&XJGnE13ag#-@5D-jbRr59FN0TUvu~lzOG!lDd)ya$MpotwA_Sy;5&!UCGCKTu0IgGUfWd7oUBXpM5hMX48MK@b%$6bK;tF%{c>{0nPw61Nmpq z(srqQOFX6Hb8(jZrN<*Vr3|&Y2*#SH_Pi&f)>~5V#V7Hy)EokzB^POrM}AT-OC8E* z@znH?Z`r>uXx7S7%k|~@PR)R2rZhcM-qLd#KW(j)VX98+si~jx)tu9KYHBqMQ?(jT zP3@GY_MFC7TB~KK)k}S)b$rIpXZ)Oj@(k48k35e(7h`_Rk2Am-Xki9QKT|EqUScX8 zpNqBRFFhW`DfO4ud2&iUHFaKm8c$8F2cO1ITYJ{|w0UUj(fqW2+Ioaf>t(4Y-!jj4 zEo)`zv97Ph%hE$W&fnM!SZ2x6Gv+NlX7SVZNLj|}v|g6_F<GHzOUhKL ztLY=>OP<=}2*X&f)?ZRT=3_Y~@v_udScW|oUun=ivDRsMhV@ooY2VOKvoG~P z>NGt4TEtV++xOJG)_5Sb8m4|N;;HHFdrDtx^wC-=Q@;-N)b#g#rLQ&mXswi~Ux#{X z`uo1p*BX7aR?5_`Lp?S9eP8KojXqi{W$M?Vo|^u?uk^J>AFY)#_3KbiO@H54^IGG9 z)M}XewTP#tx9_QWt?@u=HB7@=#8=uo^wjQ4eXMm_9@g{O7iWMoz!~5Sa0b3Q1H;eZ zr864x)b{7#tuaW?N3cr0rFCA6Qa^2-2cOncQ;+g#yezc{pT$qxb5{AZIaup0`B}Xz zedL?>zt6U;5z?bwU&PDOOTPK$&$2#fk7j*QFH1l9=9|BiwPlTv9?kla@2l%`*7BQs zjkWoa@5?i2$#2dKt$ghJ>a#Cq!b~^=oB_^&{|wZAhF&7`8dK?fG;7IMd)%8->o2MI zVw8AUYEV9lr=|zt)A(3x&nlla4{e_%KdlE+f7bYrEG)g+{45?wkA@HNvG$Vh%g-yc zIZE@>>U!0akL#Oez-ykgI-EZ&`?HeYQZp#ck9^~Es>yGO49^~5V!6)&t)}VY=Uuhr0C-p$;tbB;4rq{x! 
z(MM~~8Xua6wqKKa6OA8EFd{G?hDb>!pvM$DB|lk>;< zOEVyy7wpp_M8e2dG>n(g%FH4`6&*FjfoFzUa2Wzi1KdV04cNX~243K_F zeuzHWTjN9Z(SGt_pM@>wht7^i|5Sb+`ltr^;@%(V+IaMHeGhki)7pE?5TY?Pf8?vL zPwV+Dy~eftTHuph18exqn=^3sXJGhQyyTh>S)}`s%u)tvofoUrPh01~r}fm-BYYYk zYps>f>Zk3q@M%4edMzK~W$D%MS@hAKv%-hwVC|RWXVv!TYn{&{14N^iAENEi+cKZW z^`jb7`Jvh#{p71ZqngUO)%>QeY0h({{N`ks@*>}1zCY5sPtC<#=SuTOKH2)ToZlMO zcq%{g)n~AlAJ3otXWCq2u5oDwv_D6bbUcr1nM(DZOi8QcgBos`ehQ=@ej_%zq+T`S4Y+f$=gM zedf)4=g+rv&t76JN-WajQOr_*X`L6R)K6Q7@@f6F^$4HV%TjOQv-nthw0u?{YoCVC z>Vfo0`4A7J_bl)s*N^tr@#f$v|g5aE1$*7(xc_G zcpyC*KE%h`E9JB5qkU(A56uASujPkmd-P86dFZ1Wt@5GQ?bTn(&r2WGX^{`TX3zey z{5-Fv*=I5=C>XFa)`4zez-u+x(uCMn@Y4kXMhc$m`y*2X@T|1mV^6A!R zEWb5nSmNVx`!n}L%&vAHnmdykPwjjJZ_U$k-hZ4lZ z)BQd<%zK#M@ch#ILqQV>7VivHtv# zPrp8`<+rvq9_H5q-xxEG@$VsHGoZb14Ik=d>6h|Zv^{zxd>%eljT)a-*Q>8(KCcWAol<^?re|-9e4f{jYK`TG zYI^n$`8>5fw8*D_X3)>QrTqHbYkA$zZ!JuH4)XcEKl-_@$~JEa{dly{?b}6 zIcsCy{rMxGetlZYZ^+C2_59sq?ERUJEwM%=7U}T_W~sNP&Vy6qr>#Z!v|g5aE1$*7 z(qrMX_*i>1d{z&nPs)dQAiWYkL?7*Koe#Z!w0|l;RNJG!#OI;yq0u6r$8~#YjOFL0 z?V&T|^U(CvAfNts%6|U+{QAe&@|t{H-z8@;&)<>^Q{LqBd4Ei0yyQ8quh06X9G6_j zl6PKT^4Zph`Pp*I^VrXCjWgI9-;kI4%lv&I=bq*)u{1Vb-ntg_`Ez2omTmfYxe9N z%g<9E)oP6oySjFL$5QcKj#;vkLr@o=lK=74pIGF zU#@S|Y)N!Ee}^%DX`LnWFn3L!Kl0hur=Q;%GaT}9f7!ox^z*mA*ZUgYudPg_J`cW< zw#RXl&x2t~!^$_M>3Pn==jmV5((u)^BF;A4+LF&_eb=<6eU`kw{bOr-O+L=wlJm&< z!kG+_aKk;*?y^+%-8r^4Zp>pWh+BuFdV-+0=My=OcJ)o|f|-oEBec z55gz)Kx>ot6+m!)6IXVLcPk??u=ST$;VR$Z^YDLyX`M5n}uXnOXx z$meqrTgh*&Yn=0Ifv?YcD6h5m_BnFVGL`B*SxX-1 zag-7Duk~B`YPw$MEqq?y5*;m{MANfI%IE2+(US0K^t@|peBORqy_Wg3*BVh@$}hsp zqB+KA(TVD5jV~$(tL`vAt4>s3pD)S-(Iub#ci)tI&JS6G$X@c{>l?W~sCHUkR3oyV zeAxOhKjs(t{;X+p{y2ZN8Hqff=5Nh2NIv}ivE~dj+aYeP@9W5{tW2doFV>PidK~3L z{cHVJzM8Jrc?+MHw?s$7C(-k+k@9)_O7$dsQmu%(8efE`Mstc!qZ3uzDqmC{THUey zv>K81t?@->VbLDuXVHl4>GMVUShZW>vtD0xU(PSu$Ex1~pY_`2=u7#{;bZk6AGSWs zkNH{Odmeq9KhB@W99cCufB5{ZIm5(8KKmNy{0?nvy`SX1h`h44=hlPMlV_-f@C~&h z_N{yoEMuBlzA=r+V;a6lj*_;NuhbuXT*4R4Fr{DPo9dn8+!Ws&{xy#hU#)Mh^DXkt zm5sz_n4hF)&YG6^=FCOv)z44bGiP1qo6}R{MLzrQZYk%KpC<3QYsrVL5A$PwbH8W% zew;tfUwd7gaO`m8;_C1;D?*laOB=C}6y3;D1aWqx>mYtAsSwZNA-qqeorvMAp%-5QJZ z{FpX!OzNwtLwHiJ#>-NR@LBw{Jr+K#2U4%)L%b}#Qa+2eM~{Th!^f&or#`CH8XtPS-u?ajytO^_$cOz5mh`OEcx?r4^w= zK5Tu~o?pbwP!pRO=EwXv1M|$l+V9Wg!{&hb;rXpO!^9T-Exi{_BsjnB)+sx!rB)%5IZmCrK+ zL~ATRM9;f-$mgw(>b1m&UTZ{uKfeflR5SCTI#K=P!+(E^S{o1Dw7woXQ5xi1_xePw zJ9WA4oB_@NXJFYG;O8%vKl8)$TXTkqE&5w}4~*u>`cj@+UDos1*1W9eQ8ufuw9m>X z)ko_re5j|UU&E)-^{SQfdHHE|Bz#&;&-xmlrnwbzr>0-S zr_uGQmGXJ{X>}xgT20UT8lR_^MQe)BqUYUH;`8>g>b1ycz1E1nVSW)Fh-OQC$aO~b z_VbJKKy))7q7m6kKK%E$$n`X)ko_re5j|UU&E)-^{SQfdHHE| zBz#&;&-xmlrnwbzr>0-Sr_uGQmGXJ{X>}xgT20UT8lR_^ zMQe)BqUYUH;`8>g>b1ycz1E1nVSW)Fh-OQC$aO~b_VbJKKy))7q7m6kKK%E$$n`X)ko_re5j|UU(2V__NbNed3aegBzzWKubvv8mycCviqER)+1Dzc zX9kGYSbm6}ckhtTTOZYHi4VQji2iw(m3`4BHluawWC?a?FQ^YF21)cCBrUVT%1ULJ@}t9-~cd-jgy=c$is zwZ?~DuXlexKW}XhJ@OsK?;jqs5}}diFGAN#lYH3vFh49muQ{d;*MT#@8E9<=`1yg& zEb}}2`K@`46Wd(hrF+|4{(YYki}ZM3%j>n&TT|!3rt#C(B79meOTC59;$!X6@>zYX zeHuQi2hu0yLp+dP2_K@5_SX1NeYAgy57qYQFY$S3duWXDdFXm+w8rOk-CjDw{JeC% zbozW=nw~nX@OfUdr&i9-Q`1wc1wPN~_0~%Hd24#=kq=v+wddzKE7Zbfh50c*?F_8_ z{z5)%rkNj}-^r1hQ4%i=>mY<-v?^Rv9~ym~l)oIkHwvgmOB zIDeM8@#;zQx8@mag)eg+($+eIkr`Q;N_}3eC4KZb%7^;b`mKC5U9a;NJ}+;Hj)qU7 z=UpS^^Y)eMN%*8%5p^}b2v3dX6rV;XsOnqieV8Bfv%dE{`Z#}_KaV-GYHQ;MSnIk7HQ>osQwd8>wM;TH7TECUArt5Xy!sq2J(b4irG(Bsi ze4d^fEeW4S&%3t9=k2G}Yne}btr7L5{35(8nqzzxov5DH_@Z*K>JIa>>O}SR`Jy}! 
zUGmv~H%htZ{E#(>>?I$*zLD#LYNz!@H6r`Thpi9uV}6nE&zd&pkMmcXk;wCD{?(w{K=jDOul=u)$&)ydKJg*mwedzFhAz!@jkR_@bd$o zzcs&yTUp<=W|;G9fv?ZX*N^_&ucfj;@5vF|R;E&)2V+Uw<2cIa!7!y^<(tye-8zwiA@TUfsO>)aK=GXnJ#XZZ~d^ z=IU2nqWKP-(LT;ERGhk}PH)^xHFw^> zdF}ptr|%cXgXheqTJ*oqzlGgGVQ~$MU;0 z>%RZtk5cdUeX{?0vfKXj?#aoudv~tCbFx41weEPu3sp1k-MFa7!Me}C4sZ!SKaUvbL>m;e)C0!)AjFaajO1egF5U;<3w@Dlj^li&RM zwYT56G~C|UMv=d3&w}#r@Of*0=K9;`_shpg3%Awj{e-p_2`JT5uB>qZwv)rG&{^g(3*C&^T+y4iDpZa6~ literal 0 HcmV?d00001 diff --git a/reframe_tests/tests/base_tests.py b/reframe_tests/tests/base_tests.py index 7b02648e..d4551cee 100644 --- a/reframe_tests/tests/base_tests.py +++ b/reframe_tests/tests/base_tests.py @@ -182,7 +182,7 @@ class GprMaxBaseTest(RunOnlyRegressionTest): else: return self.getdep(variant) - def build_reference_filepath(self, name: Union[str, os.PathLike]) -> Path: + def build_reference_filepath(self, name: Union[str, os.PathLike], suffix: str = ".h5") -> Path: """Build path to the specified reference file. Reference files are saved in directories per test case. If this @@ -192,6 +192,7 @@ class GprMaxBaseTest(RunOnlyRegressionTest): Args: name: Name of the file. + suffix: File extension. Default ".h5". Returns: filepath: Absolute path to the reference file. @@ -202,7 +203,7 @@ class GprMaxBaseTest(RunOnlyRegressionTest): else: reference_dir = target.short_name - reference_file = Path("regression_checks", reference_dir, name).with_suffix(".h5") + reference_file = Path("regression_checks", reference_dir, name).with_suffix(suffix) return reference_file.absolute() # TODO: Change CreatePyenvTest to a fixture instead of a test dependency diff --git a/reframe_tests/tests/mixins.py b/reframe_tests/tests/mixins.py index e1b893b3..2bd1a04f 100644 --- a/reframe_tests/tests/mixins.py +++ b/reframe_tests/tests/mixins.py @@ -9,6 +9,7 @@ from typing_extensions import TYPE_CHECKING from reframe_tests.tests.base_tests import GprMaxBaseTest from reframe_tests.tests.regression_checks import ( + GeometryViewRegressionCheck, ReceiverRegressionCheck, RegressionCheck, SnapshotRegressionCheck, @@ -77,6 +78,50 @@ class SnapshotMixin(GprMaxMixin): self.regression_checks.append(regression_check) +class GeometryOnlyMixin(GprMaxMixin): + """Run test with geometry only flag""" + + @run_after("setup") + def add_geometry_only_flag(self): + self.executable_opts += ["--geometry-only"] + + +class GeometryViewMixin(GprMaxMixin): + """Add regression tests for geometry views. + + Attributes: + geometry_views (list[str]): List of geometry views to run + regression checks on. + """ + + geometry_views = variable(typ.List[str], value=[]) + + def build_geometry_view_filepath(self, geometry_view: str) -> Path: + """Build filepath to the specified geometry view. + + Args: + geometry_view: Name of the geometry view. + """ + return Path(geometry_view).with_suffix(".vtkhdf") + + @run_after("setup") + def add_geometry_view_regression_checks(self): + """Add a regression check for each geometry view. + + The test will be skipped if no geometry views have been specified. 
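+
+        Used from a concrete test, e.g. (hypothetical names):
+
+            class TestMyView(GeometryViewMixin, GprMaxBaseTest):
+                model = parameter(["geometry_view_voxel"])
+                geometry_views = ["full_volume"]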
+ """ + self.skip_if( + len(self.geometry_views) < 0, + f"Must provide a list of geometry views.", + ) + + for geometry_view in self.geometry_views: + geometry_view_file = self.build_geometry_view_filepath(geometry_view) + reference_file = self.build_reference_filepath(geometry_view, ".vtkhdf") + regression_check = GeometryViewRegressionCheck(geometry_view_file, reference_file) + self.regression_checks.append(regression_check) + + class PythonApiMixin(GprMaxMixin): """Use the GprMax Python API rather than a standard input file.""" diff --git a/reframe_tests/tests/regression_checks.py b/reframe_tests/tests/regression_checks.py index 51e1cdc9..e6adcf7d 100644 --- a/reframe_tests/tests/regression_checks.py +++ b/reframe_tests/tests/regression_checks.py @@ -74,7 +74,12 @@ class RegressionCheck: h5diff = "h5diff" h5diff_output = osext.run_command( - [h5diff, *self.h5diff_options, str(self.output_file), str(self.reference_file)] + [ + h5diff, + *self.h5diff_options, + str(self.output_file.absolute()), + str(self.reference_file), + ] ) return sn.assert_true( @@ -135,3 +140,11 @@ class SnapshotRegressionCheck(RegressionCheck): @property def error_msg(self) -> str: return f"Snapshot '{self.output_file.name}' failed regression check " + + +class GeometryViewRegressionCheck(RegressionCheck): + """Run regression check on a GprMax GeometryView.""" + + @property + def error_msg(self) -> str: + return f"GeometryView '{self.output_file.name}' failed regression check " diff --git a/reframe_tests/tests/src/geometry_view_tests/geometry_view_fine.in b/reframe_tests/tests/src/geometry_view_tests/geometry_view_fine.in new file mode 100644 index 00000000..d6709644 --- /dev/null +++ b/reframe_tests/tests/src/geometry_view_tests/geometry_view_fine.in @@ -0,0 +1,17 @@ +#title: Hertzian dipole in free-space +#domain: 0.100 0.100 0.100 +#dx_dy_dz: 0.001 0.001 0.001 +#time_window: 3e-9 + +#waveform: gaussiandot 1 1e9 myWave +#hertzian_dipole: z 0.050 0.050 0.050 myWave + +#material: 4.9 0 1 0 myWater +#material: 2 0 1.4 0 unusedMaterial +#material: 3 0 2 0 boxMaterial + +#sphere: 0.05 0.05 0.05 0.03 myWater y +#box: 0.01 0.01 0.01 0.09 0.025 0.025 boxMaterial y + +#geometry_view: 0.02 0.02 0.02 0.06 0.06 0.06 0.001 0.001 0.001 partial_volume f +#geometry_view: 0 0 0 0.1 0.1 0.1 0.001 0.001 0.001 full_volume f diff --git a/reframe_tests/tests/src/geometry_view_tests/geometry_view_voxel.in b/reframe_tests/tests/src/geometry_view_tests/geometry_view_voxel.in new file mode 100644 index 00000000..64596d82 --- /dev/null +++ b/reframe_tests/tests/src/geometry_view_tests/geometry_view_voxel.in @@ -0,0 +1,17 @@ +#title: Hertzian dipole in free-space +#domain: 0.100 0.100 0.100 +#dx_dy_dz: 0.001 0.001 0.001 +#time_window: 3e-9 + +#waveform: gaussiandot 1 1e9 myWave +#hertzian_dipole: z 0.050 0.050 0.050 myWave + +#material: 4.9 0 1 0 myWater +#material: 2 0 1.4 0 unusedMaterial +#material: 3 0 2 0 boxMaterial + +#sphere: 0.05 0.05 0.05 0.03 myWater y +#box: 0.01 0.01 0.01 0.09 0.025 0.025 boxMaterial y + +#geometry_view: 0.02 0.02 0.02 0.06 0.06 0.06 0.001 0.001 0.001 partial_volume n +#geometry_view: 0 0 0 0.1 0.1 0.1 0.001 0.001 0.001 full_volume n diff --git a/reframe_tests/tests/standard_tests.py b/reframe_tests/tests/standard_tests.py index 98f23c06..9f268cb2 100644 --- a/reframe_tests/tests/standard_tests.py +++ b/reframe_tests/tests/standard_tests.py @@ -1,5 +1,10 @@ from reframe_tests.tests.base_tests import GprMaxBaseTest -from reframe_tests.tests.mixins import ReceiverMixin, SnapshotMixin +from 
reframe_tests.tests.mixins import ( + GeometryOnlyMixin, + GeometryViewMixin, + ReceiverMixin, + SnapshotMixin, +) class GprMaxRegressionTest(ReceiverMixin, GprMaxBaseTest): @@ -8,3 +13,7 @@ class GprMaxRegressionTest(ReceiverMixin, GprMaxBaseTest): class GprMaxSnapshotTest(SnapshotMixin, GprMaxBaseTest): pass + + +class GprMaxGeometryViewTest(GeometryViewMixin, GeometryOnlyMixin, GprMaxBaseTest): + pass diff --git a/reframe_tests/tests/test_geometry_views.py b/reframe_tests/tests/test_geometry_views.py new file mode 100644 index 00000000..8f4091d5 --- /dev/null +++ b/reframe_tests/tests/test_geometry_views.py @@ -0,0 +1,20 @@ +import reframe as rfm +from reframe.core.builtins import parameter + +from reframe_tests.tests.mixins import MpiMixin +from reframe_tests.tests.standard_tests import GprMaxGeometryViewTest + + +@rfm.simple_test +class TestGeometryView(GprMaxGeometryViewTest): + tags = {"test", "serial", "geometry only", "geometry view"} + sourcesdir = "src/geometry_view_tests" + model = parameter(["geometry_view_voxel", "geometry_view_fine"]) + geometry_views = ["partial_volume", "full_volume"] + + +@rfm.simple_test +class TestGeometryViewMPI(MpiMixin, TestGeometryView): + tags = {"test", "mpi", "geometry only", "geometry view"} + mpi_layout = parameter([[2, 2, 2], [4, 4, 1]]) + test_dependency = TestGeometryView From 98507608e56449b859e94992c42b59e8d6b439bf Mon Sep 17 00:00:00 2001 From: nmannall Date: Thu, 5 Dec 2024 12:14:55 +0000 Subject: [PATCH 28/31] Add geometry object reframe tests --- .gitattributes | 2 + .../full_volume.h5 | 3 + .../full_volume_materials.txt | 9 +++ .../partial_volume.h5 | 3 + .../partial_volume_materials.txt | 9 +++ reframe_tests/tests/mixins.py | 61 ++++++++++++++++- reframe_tests/tests/regression_checks.py | 68 +++++++++++++------ .../geometry_object_write.in | 17 +++++ reframe_tests/tests/standard_tests.py | 5 ++ reframe_tests/tests/test_geometry_objects.py | 20 ++++++ 10 files changed, 174 insertions(+), 23 deletions(-) create mode 100644 reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5 create mode 100644 reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt create mode 100644 reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5 create mode 100644 reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt create mode 100644 reframe_tests/tests/src/geometry_object_tests/geometry_object_write.in create mode 100644 reframe_tests/tests/test_geometry_objects.py diff --git a/.gitattributes b/.gitattributes index 73f2c564..bec33da4 100644 --- a/.gitattributes +++ b/.gitattributes @@ -2,3 +2,5 @@ tools/Jupyter_notebooks/* linguist-vendored reframe_tests/regression_checks/TestGeometryView_5176823e/partial_volume.vtkhdf filter=lfs diff=lfs merge=lfs -text reframe_tests/regression_checks/TestGeometryView_5176823e/full_volume.vtkhdf filter=lfs diff=lfs merge=lfs -text reframe_tests/regression_checks/TestGeometryView_77980202/full_volume.vtkhdf filter=lfs diff=lfs merge=lfs -text +reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5 filter=lfs diff=lfs merge=lfs -text +reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5 filter=lfs diff=lfs merge=lfs -text diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5 b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5 new file mode 100644 index 00000000..0cd34b68 --- /dev/null +++ 
b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ac30e1b942acb3e17bec7d2a16167429ed3b1590642d9c403ee5d0adb6be27c8 +size 75462640 diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt new file mode 100644 index 00000000..d7a4e603 --- /dev/null +++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt @@ -0,0 +1,9 @@ +#material: 1 inf 1 0 pec +#material: 1 0 1 0 free_space +#material: 4.9 0 1 0 myWater +#material: 3 0 2 0 boxMaterial +#material: 1.5 0 1.25 0 boxMaterial+free_space+free_space+free_space +#material: 2 0 1.5 0 boxMaterial+free_space+free_space+boxMaterial +#material: 1.975 0 1 0 myWater+free_space+free_space+free_space +#material: 2.95 0 1 0 myWater+free_space+free_space+myWater +#material: 3.925 0 1 0 myWater+myWater+free_space+myWater diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5 b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5 new file mode 100644 index 00000000..b46a2c6b --- /dev/null +++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:224975ddea1c1a4cd3f99aaa9e7ce1fb1ec31de07b0352424f66be73a5645a4f +size 4980400 diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt new file mode 100644 index 00000000..d7a4e603 --- /dev/null +++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt @@ -0,0 +1,9 @@ +#material: 1 inf 1 0 pec +#material: 1 0 1 0 free_space +#material: 4.9 0 1 0 myWater +#material: 3 0 2 0 boxMaterial +#material: 1.5 0 1.25 0 boxMaterial+free_space+free_space+free_space +#material: 2 0 1.5 0 boxMaterial+free_space+free_space+boxMaterial +#material: 1.975 0 1 0 myWater+free_space+free_space+free_space +#material: 2.95 0 1 0 myWater+free_space+free_space+myWater +#material: 3.925 0 1 0 myWater+myWater+free_space+myWater diff --git a/reframe_tests/tests/mixins.py b/reframe_tests/tests/mixins.py index 2bd1a04f..f7181ce9 100644 --- a/reframe_tests/tests/mixins.py +++ b/reframe_tests/tests/mixins.py @@ -9,9 +9,11 @@ from typing_extensions import TYPE_CHECKING from reframe_tests.tests.base_tests import GprMaxBaseTest from reframe_tests.tests.regression_checks import ( + GeometryObjectMaterialsRegressionCheck, + GeometryObjectRegressionCheck, GeometryViewRegressionCheck, + H5RegressionCheck, ReceiverRegressionCheck, - RegressionCheck, SnapshotRegressionCheck, ) @@ -38,7 +40,7 @@ class ReceiverMixin(GprMaxMixin): ) self.regression_checks.append(regression_check) else: - regression_check = RegressionCheck(self.output_file, reference_file) + regression_check = H5RegressionCheck(self.output_file, reference_file) self.regression_checks.append(regression_check) @@ -86,6 +88,61 @@ class GeometryOnlyMixin(GprMaxMixin): self.executable_opts += ["--geometry-only"] +class GeometryObjectMixin(GprMaxMixin): + """Add regression tests for geometry objects. + + Attributes: + geometry_objects (list[str]): List of geometry objects to run + regression checks on. 
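+
+        For a geometry object named "full_volume" (as in the tests
+        below), both full_volume.h5 and full_volume_materials.txt are
+        compared against the stored reference copies.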
+ """ + + geometry_objects = variable(typ.List[str], value=[]) + + def build_geometry_object_filepath(self, geometry_object: str) -> Path: + """Build filepath to the specified geometry object. + + Args: + geometry_object: Name of the geometry object. + """ + return Path(geometry_object).with_suffix(".h5") + + def build_materials_filepath(self, geometry_object: str) -> Path: + """Build filepath to the materials output by the geometry object. + + Args: + geometry_object: Name of the geometry object. + """ + return Path(f"{geometry_object}_materials").with_suffix(".txt") + + @run_after("setup") + def add_geometry_object_regression_checks(self): + """Add a regression check for each geometry object. + + The test will be skipped if no geometry objects have been specified. + """ + self.skip_if( + len(self.geometry_objects) < 0, + f"Must provide a list of geometry objects.", + ) + + for geometry_object in self.geometry_objects: + # Add materials regression check first as if this fails, + # checking the .h5 file will almost definitely fail. + materials_file = self.build_materials_filepath(geometry_object) + materials_reference_file = self.build_reference_filepath( + materials_file.name, suffix=materials_file.suffix + ) + materials_regression_check = GeometryObjectMaterialsRegressionCheck( + materials_file, materials_reference_file + ) + self.regression_checks.append(materials_regression_check) + + geometry_object_file = self.build_geometry_object_filepath(geometry_object) + reference_file = self.build_reference_filepath(geometry_object) + regression_check = GeometryObjectRegressionCheck(geometry_object_file, reference_file) + self.regression_checks.append(regression_check) + + class GeometryViewMixin(GprMaxMixin): """Add regression tests for geometry views. diff --git a/reframe_tests/tests/regression_checks.py b/reframe_tests/tests/regression_checks.py index e6adcf7d..85c91b89 100644 --- a/reframe_tests/tests/regression_checks.py +++ b/reframe_tests/tests/regression_checks.py @@ -9,7 +9,7 @@ from reframe.utility import osext class RegressionCheck: - """Compare two hdf5 files using h5diff""" + """Compare two files using diff""" def __init__( self, output_file: Union[str, PathLike], reference_file: Union[str, PathLike] @@ -23,7 +23,8 @@ class RegressionCheck: """ self.output_file = Path(output_file) self.reference_file = Path(reference_file) - self.h5diff_options: list[str] = [] + self.cmd = "diff" + self.options: list[str] = [] @property def error_msg(self) -> str: @@ -57,26 +58,22 @@ class RegressionCheck: return sn.path_isfile(self.reference_file) def run(self) -> Literal[True]: - """Run the regression check using h5diff. + """Run the regression check. Returns: check_passed: Returns True if the output file matches the - reference file (i.e. no output from h5diff). Otherwise, + reference file (i.e. no output from diff). Otherwise, raises a SanityError. Raises: reframe.core.exceptions.SanityError: If the output file does not exist, or the regression check fails. 
""" - if runtime().system.name == "archer2": - h5diff = "/opt/cray/pe/hdf5/default/bin/h5diff" - else: - h5diff = "h5diff" - h5diff_output = osext.run_command( + completed_process = osext.run_command( [ - h5diff, - *self.h5diff_options, + self.cmd, + *self.options, str(self.output_file.absolute()), str(self.reference_file), ] @@ -86,16 +83,29 @@ class RegressionCheck: sn.path_isfile(self.output_file), f"Expected output file '{self.output_file}' does not exist", ) and sn.assert_false( - h5diff_output.stdout, + completed_process.stdout, ( f"{self.error_msg}\n" - f"For more details run: '{' '.join(h5diff_output.args)}'\n" + f"For more details run: '{' '.join(completed_process.args)}'\n" f"To re-create regression file, delete '{self.reference_file}' and rerun the test." ), ) -class ReceiverRegressionCheck(RegressionCheck): +class H5RegressionCheck(RegressionCheck): + """Compare two hdf5 files using h5diff""" + + def __init__( + self, output_file: Union[str, PathLike], reference_file: Union[str, PathLike] + ) -> None: + super().__init__(output_file, reference_file) + if runtime().system.name == "archer2": + self.cmd = "/opt/cray/pe/hdf5/default/bin/h5diff" + else: + self.cmd = "h5diff" + + +class ReceiverRegressionCheck(H5RegressionCheck): """Run regression check on individual reveivers in output files. This can include arbitrary receivers in each file, or two receivers @@ -106,7 +116,7 @@ class ReceiverRegressionCheck(RegressionCheck): self, output_file: Union[str, PathLike], reference_file: Union[str, PathLike], - output_receiver: Optional[str], + output_receiver: str, reference_receiver: Optional[str] = None, ) -> None: """Create a new receiver regression check. @@ -125,26 +135,42 @@ class ReceiverRegressionCheck(RegressionCheck): self.output_receiver = output_receiver self.reference_receiver = reference_receiver - self.h5diff_options.append(f"rxs/{self.output_receiver}") + self.options.append(f"rxs/{self.output_receiver}") if self.reference_receiver is not None: - self.h5diff_options.append(f"rxs/{self.reference_receiver}") + self.options.append(f"rxs/{self.reference_receiver}") @property def error_msg(self) -> str: return f"Receiver '{self.output_receiver}' failed regression check" -class SnapshotRegressionCheck(RegressionCheck): +class SnapshotRegressionCheck(H5RegressionCheck): """Run regression check on a gprMax Snapshot.""" @property def error_msg(self) -> str: - return f"Snapshot '{self.output_file.name}' failed regression check " + return f"Snapshot '{self.output_file.name}' failed regression check" -class GeometryViewRegressionCheck(RegressionCheck): +class GeometryObjectRegressionCheck(H5RegressionCheck): + """Run regression check on a GprMax GeometryObject.""" + + @property + def error_msg(self) -> str: + return f"GeometryObject '{self.output_file.name}' failed regression check" + + +class GeometryObjectMaterialsRegressionCheck(RegressionCheck): + """Run regression check on materials output by a GeometryObject.""" + + @property + def error_msg(self) -> str: + return f"GeometryObject materials file '{self.output_file}' failed regression check" + + +class GeometryViewRegressionCheck(H5RegressionCheck): """Run regression check on a GprMax GeometryView.""" @property def error_msg(self) -> str: - return f"GeometryView '{self.output_file.name}' failed regression check " + return f"GeometryView '{self.output_file.name}' failed regression check" diff --git a/reframe_tests/tests/src/geometry_object_tests/geometry_object_write.in 
diff --git a/reframe_tests/tests/src/geometry_object_tests/geometry_object_write.in b/reframe_tests/tests/src/geometry_object_tests/geometry_object_write.in
new file mode 100644
index 00000000..25110c67
--- /dev/null
+++ b/reframe_tests/tests/src/geometry_object_tests/geometry_object_write.in
@@ -0,0 +1,17 @@
+#title: Hertzian dipole in free-space
+#domain: 0.100 0.100 0.100
+#dx_dy_dz: 0.001 0.001 0.001
+#time_window: 3e-9
+
+#waveform: gaussiandot 1 1e9 myWave
+#hertzian_dipole: z 0.050 0.050 0.050 myWave
+
+#material: 4.9 0 1 0 myWater
+#material: 2 0 1.4 0 unusedMaterial
+#material: 3 0 2 0 boxMaterial
+
+#sphere: 0.05 0.05 0.05 0.03 myWater y
+#box: 0.01 0.01 0.01 0.09 0.025 0.025 boxMaterial y
+
+#geometry_objects_write: 0.02 0.02 0.02 0.06 0.06 0.06 partial_volume
+#geometry_objects_write: 0 0 0 0.1 0.1 0.1 full_volume
diff --git a/reframe_tests/tests/standard_tests.py b/reframe_tests/tests/standard_tests.py
index 9f268cb2..cab5ec7e 100644
--- a/reframe_tests/tests/standard_tests.py
+++ b/reframe_tests/tests/standard_tests.py
@@ -1,5 +1,6 @@
 from reframe_tests.tests.base_tests import GprMaxBaseTest
 from reframe_tests.tests.mixins import (
+    GeometryObjectMixin,
     GeometryOnlyMixin,
     GeometryViewMixin,
     ReceiverMixin,
@@ -17,3 +18,7 @@ class GprMaxSnapshotTest(SnapshotMixin, GprMaxBaseTest):

 class GprMaxGeometryViewTest(GeometryViewMixin, GeometryOnlyMixin, GprMaxBaseTest):
     pass
+
+
+class GprMaxGeometryObjectTest(GeometryObjectMixin, GeometryOnlyMixin, GprMaxBaseTest):
+    pass
diff --git a/reframe_tests/tests/test_geometry_objects.py b/reframe_tests/tests/test_geometry_objects.py
new file mode 100644
index 00000000..2cad4fbc
--- /dev/null
+++ b/reframe_tests/tests/test_geometry_objects.py
@@ -0,0 +1,20 @@
+import reframe as rfm
+from reframe.core.builtins import parameter
+
+from reframe_tests.tests.mixins import MpiMixin
+from reframe_tests.tests.standard_tests import GprMaxGeometryObjectTest
+
+
+@rfm.simple_test
+class TestGeometryObject(GprMaxGeometryObjectTest):
+    tags = {"test", "serial", "geometry only", "geometry object"}
+    sourcesdir = "src/geometry_object_tests"
+    model = parameter(["geometry_object_write"])
+    geometry_objects = ["partial_volume", "full_volume"]
+
+
+@rfm.simple_test
+class TestGeometryObjectMPI(MpiMixin, TestGeometryObject):
+    tags = {"test", "mpi", "geometry only", "geometry object"}
+    mpi_layout = parameter([[2, 2, 2], [4, 4, 1]])
+    test_dependency = TestGeometryObject

From 4f2b6f412dffb6d44b9d9f9de537ffe0d15454ed Mon Sep 17 00:00:00 2001
From: nmannall
Date: Thu, 5 Dec 2024 17:46:13 +0000
Subject: [PATCH 29/31] Add class function to create compound material IDs

---
 gprMax/cmds_geometry/box.py                |  2 +-
 gprMax/cmds_geometry/cone.py               |  2 +-
 gprMax/cmds_geometry/cylinder.py           |  2 +-
 gprMax/cmds_geometry/cylindrical_sector.py |  2 +-
 gprMax/cmds_geometry/ellipsoid.py          |  2 +-
 gprMax/cmds_geometry/sphere.py             |  2 +-
 gprMax/cmds_geometry/triangle.py           |  2 +-
 gprMax/cython/yee_cell_build.pyx           | 19 +++--------------
 gprMax/materials.py                        | 23 +++++++++++++++++++-
 9 files changed, 32 insertions(+), 24 deletions(-)

diff --git a/gprMax/cmds_geometry/box.py b/gprMax/cmds_geometry/box.py
index f3b8d44b..564a4132 100644
--- a/gprMax/cmds_geometry/box.py
+++ b/gprMax/cmds_geometry/box.py
@@ -117,7 +117,7 @@ class Box(UserObjectGeometry):
             numIDx = materials[0].numID
             numIDy = materials[1].numID
             numIDz = materials[2].numID
-            requiredID = materials[0].ID + "+" + materials[1].ID + "+" + materials[2].ID
+            requiredID = Material.create_compound_id(materials[0], materials[1], materials[2])
             averagedmaterial = [x for x in grid.materials if x.ID == requiredID]
             if averagedmaterial:
                numID = averagedmaterial.numID
diff --git a/gprMax/cmds_geometry/cone.py b/gprMax/cmds_geometry/cone.py
index e572903f..54f0e2c3 100644
--- a/gprMax/cmds_geometry/cone.py
+++ b/gprMax/cmds_geometry/cone.py
@@ -119,7 +119,7 @@ class Cone(UserObjectGeometry):
             numIDx = materials[0].numID
             numIDy = materials[1].numID
             numIDz = materials[2].numID
-            requiredID = materials[0].ID + "+" + materials[1].ID + "+" + materials[2].ID
+            requiredID = Material.create_compound_id(materials[0], materials[1], materials[2])
             averagedmaterial = [x for x in grid.materials if x.ID == requiredID]
             if averagedmaterial:
                 numID = averagedmaterial.numID
diff --git a/gprMax/cmds_geometry/cylinder.py b/gprMax/cmds_geometry/cylinder.py
index 31d3deba..583645dd 100644
--- a/gprMax/cmds_geometry/cylinder.py
+++ b/gprMax/cmds_geometry/cylinder.py
@@ -104,7 +104,7 @@ class Cylinder(UserObjectGeometry):
             numIDx = materials[0].numID
             numIDy = materials[1].numID
             numIDz = materials[2].numID
-            requiredID = materials[0].ID + "+" + materials[1].ID + "+" + materials[2].ID
+            requiredID = Material.create_compound_id(materials[0], materials[1], materials[2])
             averagedmaterial = [x for x in grid.materials if x.ID == requiredID]
             if averagedmaterial:
                 numID = averagedmaterial.numID
diff --git a/gprMax/cmds_geometry/cylindrical_sector.py b/gprMax/cmds_geometry/cylindrical_sector.py
index 5ae2981c..a9d1924d 100644
--- a/gprMax/cmds_geometry/cylindrical_sector.py
+++ b/gprMax/cmds_geometry/cylindrical_sector.py
@@ -128,7 +128,7 @@ class CylindricalSector(UserObjectGeometry):
             numIDx = materials[0].numID
             numIDy = materials[1].numID
             numIDz = materials[2].numID
-            requiredID = f"{materials[0].ID}+{materials[1].ID}+{materials[2].ID}"
+            requiredID = Material.create_compound_id(materials[0], materials[1], materials[2])
             averagedmaterial = [x for x in grid.materials if x.ID == requiredID]
             if averagedmaterial:
                 numID = averagedmaterial.numID
diff --git a/gprMax/cmds_geometry/ellipsoid.py b/gprMax/cmds_geometry/ellipsoid.py
index 684b54aa..2919c41d 100644
--- a/gprMax/cmds_geometry/ellipsoid.py
+++ b/gprMax/cmds_geometry/ellipsoid.py
@@ -99,7 +99,7 @@ class Ellipsoid(UserObjectGeometry):
             numIDx = materials[0].numID
             numIDy = materials[1].numID
             numIDz = materials[2].numID
-            requiredID = materials[0].ID + "+" + materials[1].ID + "+" + materials[2].ID
+            requiredID = Material.create_compound_id(materials[0], materials[1], materials[2])
             averagedmaterial = [x for x in grid.materials if x.ID == requiredID]
             if averagedmaterial:
                 numID = averagedmaterial.numID
diff --git a/gprMax/cmds_geometry/sphere.py b/gprMax/cmds_geometry/sphere.py
index 721c692f..d3577b34 100644
--- a/gprMax/cmds_geometry/sphere.py
+++ b/gprMax/cmds_geometry/sphere.py
@@ -94,7 +94,7 @@ class Sphere(UserObjectGeometry):
             numIDx = materials[0].numID
             numIDy = materials[1].numID
             numIDz = materials[2].numID
-            requiredID = materials[0].ID + "+" + materials[1].ID + "+" + materials[2].ID
+            requiredID = Material.create_compound_id(materials[0], materials[1], materials[2])
             averagedmaterial = [x for x in grid.materials if x.ID == requiredID]
             if averagedmaterial:
                 numID = averagedmaterial.numID
diff --git a/gprMax/cmds_geometry/triangle.py b/gprMax/cmds_geometry/triangle.py
index 96853555..17d4abd7 100644
--- a/gprMax/cmds_geometry/triangle.py
+++ b/gprMax/cmds_geometry/triangle.py
@@ -145,7 +145,7 @@ class Triangle(UserObjectGeometry):
             numIDx = materials[0].numID
             numIDy = materials[1].numID
             numIDz = materials[2].numID
-            requiredID = materials[0].ID + "+" + materials[1].ID + "+" + materials[2].ID
+            requiredID = Material.create_compound_id(materials[0], materials[1], materials[2])
             averagedmaterial = [x for x in grid.materials if x.ID == requiredID]
             if averagedmaterial:
                 numID = averagedmaterial.numID
diff --git a/gprMax/cython/yee_cell_build.pyx b/gprMax/cython/yee_cell_build.pyx
index ad75205e..1412561f 100644
--- a/gprMax/cython/yee_cell_build.pyx
+++ b/gprMax/cython/yee_cell_build.pyx
@@ -56,16 +56,7 @@
     # Make an ID composed of the names of the four materials that will
    # be averaged. Sort the names to ensure the same four component
    # materials always form the same ID.
-    requiredID = "+".join(
-        sorted(
-            [
-                G.materials[numID1].ID,
-                G.materials[numID2].ID,
-                G.materials[numID3].ID,
-                G.materials[numID4].ID,
-            ]
-        )
-    )
+    requiredID = Material.create_compound_id(G.materials[numID1], G.materials[numID2], G.materials[numID3], G.materials[numID4])

     # Check if this material already exists
     material = [x for x in G.materials if x.ID == requiredID]
@@ -113,14 +104,10 @@
     """

     # Make an ID composed of the names of the two materials that will be averaged
-    requiredID = G.materials[numID1].ID + '+' + G.materials[numID2].ID
+    requiredID = Material.create_compound_id(G.materials[numID1], G.materials[numID2])

     # Check if this material already exists
-    tmp = requiredID.split('+')
-    material = [x for x in G.materials if
-                (x.ID.count(tmp[0]) == requiredID.count(tmp[0]) and
-                 x.ID.count(tmp[1]) == requiredID.count(tmp[1])) or
-                (x.ID.count(tmp[0]) % 2 == 0 and x.ID.count(tmp[1]) % 2 == 0)]
+    material = [x for x in G.materials if x.ID == requiredID]

     if material:
         G.ID[componentID, i, j, k] = material[0].numID
diff --git a/gprMax/materials.py b/gprMax/materials.py
index 00876ecf..152ff86a 100644
--- a/gprMax/materials.py
+++ b/gprMax/materials.py
@@ -53,7 +53,9 @@ class Material:
         if isinstance(value, Material):
             return self.ID == value.ID
         else:
-            return super().__eq__(value)
+            raise TypeError(
+                f"'==' not supported between instances of 'Material' and '{type(value)}'"
+            )

     def __lt__(self, value: object) -> bool:
         if isinstance(value, Material):
@@ -71,6 +73,25 @@ class Material:
                 f"'>' not supported between instances of 'Material' and '{type(value)}'"
             )

+    @staticmethod
+    def create_compound_id(*materials: "Material") -> str:
+        """Create a compound ID from existing materials.
+
+        The new ID will be the IDs of the existing materials joined by a
+        '+' symbol. The component IDs will be sorted alphabetically, and
+        if two materials are provided, the compound ID will contain each
+        material twice.
+
+        Args:
+            *materials: Materials to use to create the compound ID.
+
+        Returns:
+            compound_id: New compound ID.
+        """
+        if len(materials) == 2:
+            materials += materials
+        return "+".join(sorted([material.ID for material in materials]))
+
     def calculate_update_coeffsH(self, G):
         """Calculates the magnetic update coefficients of the material.
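The sorting inside create_compound_id is what makes the averaging in yee_cell_build.pyx order-independent: the same mix of component materials always maps to the same compound ID. As a standalone sketch of the rule, using plain ID strings from the test model above in place of Material objects:

    # Sketch of the compound-ID rule (the real implementation is
    # Material.create_compound_id); the IDs used come from the test model.
    def compound_id(*ids: str) -> str:
        # A two-material (magnetic) average duplicates each component so
        # that the ID always names four parts, like the electric case.
        if len(ids) == 2:
            ids += ids
        # Sorting makes the ID independent of argument order.
        return "+".join(sorted(ids))

    assert compound_id("myWater", "free_space") == "free_space+free_space+myWater+myWater"
    assert compound_id("b", "a", "d", "c") == "a+b+c+d"

This is why the reference materials files later in the series list IDs such as free_space+free_space+myWater+myWater rather than, say, myWater+free_space+free_space+myWater.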
From 8db2383f382f62ed6186a5b1fbf142780a13dec7 Mon Sep 17 00:00:00 2001
From: nmannall
Date: Thu, 5 Dec 2024 17:52:58 +0000
Subject: [PATCH 30/31] Sort output materials by ID

---
 gprMax/geometry_outputs.py       | 24 ++++++++-----------
 .../full_volume.h5               |  2 +-
 .../full_volume_materials.txt    | 13 +++++-----
 .../partial_volume.h5            |  2 +-
 .../partial_volume_materials.txt | 13 +++++-----
 5 files changed, 24 insertions(+), 30 deletions(-)

diff --git a/gprMax/geometry_outputs.py b/gprMax/geometry_outputs.py
index b283fd5a..3416e8cb 100644
--- a/gprMax/geometry_outputs.py
+++ b/gprMax/geometry_outputs.py
@@ -487,17 +487,19 @@ class GeometryObjects:

         ID = G.ID[:, self.xs : self.xf + 1, self.ys : self.yf + 1, self.zs : self.zf + 1]

-        # Get materials present in subset of ID array
+        # Get materials present in subset of ID array and sort by material ID
         material_ids, inverse_map = np.unique(ID, return_inverse=True)
+        get_material = np.vectorize(lambda id: G.materials[id])
+        materials = sorted(get_material(material_ids))

         # Create map from material ID to 0 - number of materials
-        materials_map = {material_id: index for index, material_id in enumerate(material_ids)}
+        materials_map = {material.numID: index for index, material in enumerate(materials)}
+        map_materials = np.vectorize(lambda id: materials_map[id])

         # Remap ID array to the reduced list of materials
-        ID = np.array([materials_map[id] for id in material_ids])[inverse_map].reshape(ID.shape)
+        ID = np.array(map_materials(material_ids))[inverse_map].reshape(ID.shape)

         data = G.solid[self.xs : self.xf, self.ys : self.yf, self.zs : self.zf].astype("int16")
-        map_materials = np.vectorize(lambda id: materials_map[id])
         data = map_materials(data)

         rigidE = G.rigidE[:, self.xs : self.xf, self.ys : self.yf, self.zs : self.zf]
@@ -520,8 +522,8 @@

         # Write materials list to a text file
         with open(self.filename_materials, "w") as fmaterials:
-            for numID in material_ids:
-                self.output_material(G.materials[numID], fmaterials)
+            for material in materials:
+                self.output_material(material, fmaterials)

     def output_material(self, material: Material, file: TextIOWrapper):
         file.write(
@@ -586,15 +588,9 @@
         materials = self.comm.gather(local_materials, root=0)

         if self.comm.rank == 0:
-            # Filter out duplicate materials
+            # Filter out duplicate materials and sort by material ID
             materials = np.fromiter(chain.from_iterable(materials), dtype=Material)
-            _, indices = np.unique(materials, return_index=True)
-
-            # We want to sort the materials by the order they appear,
-            # not sorted alphabetically by ID (default behaviour using
-            # np.unique). This ensures user defined materials are lower
-            # indexed than compound materials.
-            global_materials = materials[sorted(indices)]
+            global_materials = np.unique(materials)

             global_material_ids = [m.ID for m in global_materials]
         else:
diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5 b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5
index 0cd34b68..5d1fd6ae 100644
--- a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5
+++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ac30e1b942acb3e17bec7d2a16167429ed3b1590642d9c403ee5d0adb6be27c8
+oid sha256:2b2fbdb39eb6ad15c35877be207a7a0d3f19cc22faad223d058988f788842f7b
 size 75462640
diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt
index d7a4e603..7270aed4 100644
--- a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt
+++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt
@@ -1,9 +1,8 @@
-#material: 1 inf 1 0 pec
-#material: 1 0 1 0 free_space
-#material: 4.9 0 1 0 myWater
 #material: 3 0 2 0 boxMaterial
+#material: 2 0 1.5 0 boxMaterial+boxMaterial+free_space+free_space
 #material: 1.5 0 1.25 0 boxMaterial+free_space+free_space+free_space
-#material: 2 0 1.5 0 boxMaterial+free_space+free_space+boxMaterial
-#material: 1.975 0 1 0 myWater+free_space+free_space+free_space
-#material: 2.95 0 1 0 myWater+free_space+free_space+myWater
-#material: 3.925 0 1 0 myWater+myWater+free_space+myWater
+#material: 1 0 1 0 free_space
+#material: 1.975 0 1 0 free_space+free_space+free_space+myWater
+#material: 2.95 0 1 0 free_space+free_space+myWater+myWater
+#material: 3.925 0 1 0 free_space+myWater+myWater+myWater
+#material: 4.9 0 1 0 myWater
diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5 b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5
index b46a2c6b..ad7fc33f 100644
--- a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5
+++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:224975ddea1c1a4cd3f99aaa9e7ce1fb1ec31de07b0352424f66be73a5645a4f
+oid sha256:6304565819cdb7c5b2af86d57bb25e3f22c92b9aae431934e2c2eca029928d59
 size 4980400
diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt
index d7a4e603..7270aed4 100644
--- a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt
+++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt
@@ -1,9 +1,8 @@
-#material: 1 inf 1 0 pec
-#material: 1 0 1 0 free_space
-#material: 4.9 0 1 0 myWater
 #material: 3 0 2 0 boxMaterial
+#material: 2 0 1.5 0 boxMaterial+boxMaterial+free_space+free_space
 #material: 1.5 0 1.25 0 boxMaterial+free_space+free_space+free_space
-#material: 2 0 1.5 0 boxMaterial+free_space+free_space+boxMaterial
-#material: 1.975 0 1 0 myWater+free_space+free_space+free_space
-#material: 2.95 0 1 0 myWater+free_space+free_space+myWater
-#material: 3.925 0 1 0 myWater+myWater+free_space+myWater
+#material: 1 0 1 0 free_space
+#material: 1.975 0 1 0 free_space+free_space+free_space+myWater
+#material: 2.95 0 1 0 free_space+free_space+myWater+myWater
+#material: 3.925 0 1 0 free_space+myWater+myWater+myWater
+#material: 4.9 0 1 0 myWater

From 742c75fa5f4c78153f640c37db07883d4fcddaa3 Mon Sep 17 00:00:00 2001
From: nmannall
Date: Fri, 6 Dec 2024 14:29:04 +0000
Subject: [PATCH 31/31] Sort non-compound materials by numID

---
 gprMax/materials.py              | 49 ++++++++++++++++---
 .../full_volume.h5               |  2 +-
 .../full_volume_materials.txt    |  4 +-
 .../partial_volume.h5            |  2 +-
 .../partial_volume_materials.txt |  4 +-
 5 files changed, 47 insertions(+), 14 deletions(-)

diff --git a/gprMax/materials.py b/gprMax/materials.py
index 152ff86a..05c35af0 100644
--- a/gprMax/materials.py
+++ b/gprMax/materials.py
@@ -57,21 +57,54 @@ class Material:
                 f"'==' not supported between instances of 'Material' and '{type(value)}'"
             )

-    def __lt__(self, value: object) -> bool:
-        if isinstance(value, Material):
-            return self.ID < value.ID
-        else:
+    def __lt__(self, value: "Material") -> bool:
+        """Less than comparator for two Materials.
+
+        Only non-compound materials (i.e. default or user-added
+        materials) are guaranteed to have the same numID for the same
+        material across MPI ranks. Therefore compound materials are
+        sorted by ID, and non-compound materials are always less than
+        compound materials.
+        """
+        if not isinstance(value, Material):
             raise TypeError(
                 f"'<' not supported between instances of 'Material' and '{type(value)}'"
             )
-
-    def __gt__(self, value: object) -> bool:
-        if isinstance(value, Material):
-            return self.ID > value.ID
+        elif self.is_compound_material() and value.is_compound_material():
+            return self.ID < value.ID
         else:
+            return value.is_compound_material() or (not self.is_compound_material() and self.numID < value.numID)
+
+    def __gt__(self, value: "Material") -> bool:
+        """Greater than comparator for two Materials.
+
+        Only non-compound materials (i.e. default or user-added
+        materials) are guaranteed to have the same numID for the same
+        material across MPI ranks. Therefore compound materials are
+        sorted by ID and are always greater than non-compound materials.
+        """
+        if not isinstance(value, Material):
             raise TypeError(
                 f"'>' not supported between instances of 'Material' and '{type(value)}'"
             )
+        elif self.is_compound_material() and value.is_compound_material():
+            return self.ID > value.ID
+        else:
+            return self.is_compound_material() or (not value.is_compound_material() and self.numID > value.numID)
+
+    def is_compound_material(self) -> bool:
+        """Check if a material is a compound material.
+
+        The ID of a compound material consists of the component
+        material IDs joined by a '+' symbol. Therefore we check for a
+        compound material by looking for a '+' symbol in the material
+        ID.
+
+        Returns:
+            is_compound_material: True if material is a compound
+                material. False otherwise.
+ """ + return self.ID.count("+") > 0 @staticmethod def create_compound_id(*materials: "Material") -> str: diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5 b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5 index 5d1fd6ae..fe81bea6 100644 --- a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5 +++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume.h5 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2b2fbdb39eb6ad15c35877be207a7a0d3f19cc22faad223d058988f788842f7b +oid sha256:a23ccc9ed81685385ff300c0c00e70329945db364dd7eb9607a511870469c720 size 75462640 diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt index 7270aed4..30a7b6a3 100644 --- a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt +++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/full_volume_materials.txt @@ -1,8 +1,8 @@ +#material: 1 0 1 0 free_space +#material: 4.9 0 1 0 myWater #material: 3 0 2 0 boxMaterial #material: 2 0 1.5 0 boxMaterial+boxMaterial+free_space+free_space #material: 1.5 0 1.25 0 boxMaterial+free_space+free_space+free_space -#material: 1 0 1 0 free_space #material: 1.975 0 1 0 free_space+free_space+free_space+myWater #material: 2.95 0 1 0 free_space+free_space+myWater+myWater #material: 3.925 0 1 0 free_space+myWater+myWater+myWater -#material: 4.9 0 1 0 myWater diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5 b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5 index ad7fc33f..91fe354b 100644 --- a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5 +++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume.h5 @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6304565819cdb7c5b2af86d57bb25e3f22c92b9aae431934e2c2eca029928d59 +oid sha256:4b8f8c4979f4c82eb4be8a9a63cfaf19408b9f493d1bf3082a8226e97542ae4a size 4980400 diff --git a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt index 7270aed4..30a7b6a3 100644 --- a/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt +++ b/reframe_tests/regression_checks/TestGeometryObject_a6a096cb/partial_volume_materials.txt @@ -1,8 +1,8 @@ +#material: 1 0 1 0 free_space +#material: 4.9 0 1 0 myWater #material: 3 0 2 0 boxMaterial #material: 2 0 1.5 0 boxMaterial+boxMaterial+free_space+free_space #material: 1.5 0 1.25 0 boxMaterial+free_space+free_space+free_space -#material: 1 0 1 0 free_space #material: 1.975 0 1 0 free_space+free_space+free_space+myWater #material: 2.95 0 1 0 free_space+free_space+myWater+myWater #material: 3.925 0 1 0 free_space+myWater+myWater+myWater -#material: 4.9 0 1 0 myWater