
Commit f4f1b46

Added a routine to convert a tet4 mesh to a tet10 mesh by adding midside nodes

1 parent 9d934d2 commit f4f1b46

File tree

  exodus_helper/__init__.py
  exodus_helper/core.py
  exodus_helper/dir_exodus.py
  exodus_helper/reconfigure_mesh.py
  tests/test_exodus_helper.py

5 files changed, +174 -50 lines changed

exodus_helper/__init__.py

Lines changed: 2 additions & 0 deletions

@@ -13,6 +13,7 @@
 __version_info__ = parse(__version__).release
 
 from .core import CONNECTIVITY_SIDES
+from .core import SIDES_CONNECTIVITY
 from .core import Exodus
 from .core import add_variable
 from .core import get_data_exodus
@@ -27,6 +28,7 @@
 
 from .topology import RectangularPrism
 
+from .reconfigure_mesh import convert_tet4_tet10
 from .reconfigure_mesh import create_sets_canonical
 from .reconfigure_mesh import scale_mesh
 
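
A brief aside (not part of the commit): re-exporting these names from __init__.py is what lets callers reach them at the package root, which the new test below relies on when it calls exodus_helper.convert_tet4_tet10. A minimal check, assuming the package is installed with this change applied:

# Assumes exodus_helper is installed with this commit applied.
from exodus_helper import SIDES_CONNECTIVITY, convert_tet4_tet10

assert callable(convert_tet4_tet10) and isinstance(SIDES_CONNECTIVITY, dict)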

exodus_helper/core.py

Lines changed: 12 additions & 4 deletions

@@ -36,7 +36,12 @@
         (1, 2, 4): 1,
         (2, 3, 4): 2,
         (1, 3, 4): 3,
-        (1, 2, 3): 4}}
+        (1, 2, 3): 4},
+    'TETRA10': {
+        (1, 5, 2, 9, 4, 8): 1,
+        (2, 6, 3, 10, 4, 9): 2,
+        (1, 7, 3, 10, 4, 8): 3,
+        (1, 5, 2, 6, 3, 7): 4}}
 
 items_connectivity = CONNECTIVITY_SIDES.items()
 SIDES_CONNECTIVITY = {
@@ -2041,7 +2046,7 @@ def put_elem_connectivity(self, id_blk, connectivity) -> bool:
         """Store the nodal connectivity for an element block.
 
         Args:
-            connectivity (numpy.ndarray(float)): A 2D array of node indices.
+            connectivity (numpy.ndarray(int)): A 1D array of node ids.
 
         Returns:
             `True` if successful, otherwise returns `False`.
@@ -2050,8 +2055,11 @@ def put_elem_connectivity(self, id_blk, connectivity) -> bool:
         num_elems = self.get_num_elems_in_blk(id_blk)
         num_nodes = self.get_num_nodes_per_elem(id_blk)
         connect = np.reshape(connectivity, (num_elems, num_nodes))
-        self.dataset.variables[f'connect{idx}'][:] = connect
-        return True
+        try:
+            self.dataset.variables[f'connect{idx}'][:] = connect
+            return True
+        except KeyError:
+            return False
 
     def put_elem_id_map(self, elem_id_map) -> bool:
         """Store an element ID mapping.

exodus_helper/dir_exodus.py

Lines changed: 3 additions & 3 deletions (the before/after entries read identically here; the underlying change appears to be whitespace-only)

@@ -238,16 +238,16 @@
     'put_coords',
     'put_elem_attr',
     'put_elem_attr_names',
-    'put_elem_attr_values',
-    'put_elem_blk_info',
+    'put_elem_attr_values',
+    'put_elem_blk_info',
     'put_elem_blk_name',
     'put_elem_blk_names',
     'put_elem_connectivity',
     'put_elem_face_conn',
     'put_elem_id_map',
     'put_element_attribute_names',
     'put_element_property_value',
-    'put_element_variable_name',
+    'put_element_variable_name',
     'put_element_variable_values',
     'put_face_count_per_polyhedra',
     'put_face_node_conn',

exodus_helper/reconfigure_mesh.py

Lines changed: 142 additions & 43 deletions

@@ -6,6 +6,7 @@
 
 # --------------------------------------------------------------------------- #
 
+from itertools import permutations
 import os
 import sys
 
@@ -18,52 +19,102 @@
 
 # --------------------------------------------------------------------------- #
 
-def scale_mesh(filename, scale=(1., 1., 1.)):
-    """Scale the dimensions of a mesh by multiplying the coordinate values of
-    the mesh by the scaling weights.
-
-    Args:
-        filename (str): Name of a .g file created in association with a mesh.
-        scale (tuple(float), optional): A list of scaling weights.
-            Defaults to (1., 1., 1.).
-
-    Raises:
-        AssertionError: The filename must correspond to an existing .g file.
-    """
-    assert os.path.isfile(filename)
-    root, ext = os.path.splitext(filename)
-
-    mesh = Exodus(filename)
-    dataset_from = mesh.dataset
-
-    dataset_to = netCDF4.Dataset(
-        root + '_scaled' + ext, mode='w', format=dataset_from.data_model)
-
-    for attr in dataset_from.ncattrs():
-        dataset_to.setncattr(attr, dataset_from.getncattr(attr))
+IDXS_EDGES_4 = [[0, 1], [1, 2], [0, 2], [0, 3], [1, 3], [2, 3]]
 
-    dimensions = dataset_from.dimensions
-    for k, v in dimensions.items():
-        dataset_to.createDimension(k, size=v.size)
 
-    variables = dataset_from.variables
-    for k, v in variables.items():
-        attrs = v.ncattrs()
-        fargs = {}
-        if '_FillValue' in attrs:
-            fargs['fill_value'] = v.getncattr('_FillValue')
-            attrs.remove('_FillValue')
-        variable = dataset_to.createVariable(
-            k, v.datatype, dimensions=v.dimensions, **fargs)
-        variable[:] = v[:]
-        for attr in attrs:
-            variable.setncattr(attr, v.getncattr(attr))
+# --------------------------------------------------------------------------- #
 
-    mesh.close()
-    dataset_to.variables['coordx'][:] *= scale[0]
-    dataset_to.variables['coordy'][:] *= scale[1]
-    dataset_to.variables['coordz'][:] *= scale[2]
-    dataset_to.close()
+def convert_tet4_tet10(filename_from, filename_to=None, **kwargs):
+
+    mesh_from = Exodus(filename_from)
+
+    num_elements = mesh_from.get_num_elems()
+    connectivity_to = np.zeros((num_elements, 10), dtype=int)
+    connectivity_from = mesh_from.get_elem_connectivity_full()[:]
+    connectivity_to[:, :4] = connectivity_from
+
+    coords_from = np.stack(mesh_from.get_coords()).T
+    num_nodes_from = len(coords_from)
+
+    dims = mesh_from.dataset.dimensions
+    num_ns = dims['num_ns_global'].size
+    num_ss = dims['num_ss_global'].size
+
+    num_side_sss = [dims[f'num_side_ss{n}'].size for n in range(1, num_ss + 1)]
+
+    pairs_elem_all = {}
+    coords_to = coords_from.tolist()
+    for idx_elem, ids_node in enumerate(connectivity_from):
+        pairs_elem = [tuple(ids_node[i]) for i in IDXS_EDGES_4]
+        for idx_pair, pair in enumerate(pairs_elem):
+            if pair in pairs_elem_all:
+                id_node_new = pairs_elem_all[pair]
+            else:
+                id_node_new = num_nodes_from + len(pairs_elem_all) + 1
+                pairs_elem_all[pair] = id_node_new
+                idxs_old = [p - 1 for p in pair]
+                coords_to.append(np.mean(coords_from[idxs_old, :], axis=0))
+            connectivity_to[idx_elem, 4 + idx_pair] = id_node_new
+
+    ids_node_set = mesh_from.get_node_set_ids()
+    sets_nodes = []
+    for id_node_set in ids_node_set:
+        ids_node = mesh_from.get_node_set_nodes(id_node_set).tolist()
+        pairs = permutations(ids_node, 2)
+        for pair in pairs:
+            id_new = pairs_elem_all.get(pair, pairs_elem_all.get(pair[::-1]))
+            if id_new not in ids_node and id_new is not None:
+                ids_node.append(id_new)
+        sets_nodes.append(ids_node)
+
+    num_nod_nss = [len(s) for s in sets_nodes]
+
+    ids_blk = mesh_from.get_elem_blk_ids()
+    define_maps = True
+    nums_elems = [mesh_from.get_num_elems_in_blk(i) for i in ids_blk]
+    nums_nodes = [10 for i in ids_blk]
+    nums_attrs = [0 for i in ids_blk]
+    elem_types = ['TETRA10' for i in ids_blk]
+
+    dict_attrs, dict_dimensions, dict_variables = mesh_from._get_dicts_netcdf()
+
+    if filename_to is None:
+        filename_to = '_tet10'.join(os.path.splitext(filename_from))
+    mesh_to = Exodus(
+        filename_to,
+        mode='w',
+        num_info=mesh_from.get_num_info_records() + 1,
+        num_dim=mesh_from.numDim,
+        num_nodes=len(coords_to),
+        num_elem=num_elements,
+        num_el_blk=len(ids_blk),
+        num_node_sets=num_ns,
+        num_side_sets=num_ss,
+        num_nod_nss=num_nod_nss,
+        num_side_sss=num_side_sss,
+        title=kwargs.get('title', mesh_from.get_title()),
+        **kwargs)
+
+    mesh_to.put_coords(*np.array(coords_to).T)
+    mesh_to.put_concat_elem_blk(
+        ids_blk, elem_types, nums_elems, nums_nodes, nums_attrs, define_maps)
+    variables = mesh_to.dataset.variables
+    for i in ids_blk:
+        idxs_elem = mesh_from.get_idxs_elem_in_blk(i)
+        variables[f'connect{i}'][:] = connectivity_to[idxs_elem]
+
+    ids_side_set = mesh_from.get_side_set_ids()
+    for id_side_set in ids_side_set:
+        ids_elem, ids_side = mesh_from.get_side_set(id_side_set)
+        num_sides = mesh_from.get_num_faces_in_side_set(id_side_set)
+        mesh_to.put_side_set_params(id_side_set, num_sides)
+        mesh_to.put_side_set(id_side_set, ids_elem, ids_side)
+
+    for id_node_set, ids_node in zip(ids_node_set, sets_nodes):
+        mesh_to.put_node_set_params(id_node_set, len(ids_node))
+        mesh_to.put_node_set(id_node_set, ids_node)
+
+    return mesh_to
 
 
 def create_sets_canonical(filename):
@@ -208,6 +259,54 @@ def create_sets_canonical(filename):
     dataset_to.close()
 
 
+def scale_mesh(filename, scale=(1., 1., 1.)):
+    """Scale the dimensions of a mesh by multiplying the coordinate values of
+    the mesh by the scaling weights.
+
+    Args:
+        filename (str): Name of a .g file created in association with a mesh.
+        scale (tuple(float), optional): A list of scaling weights.
+            Defaults to (1., 1., 1.).
+
+    Raises:
+        AssertionError: The filename must correspond to an existing .g file.
+    """
+    assert os.path.isfile(filename)
+    root, ext = os.path.splitext(filename)
+
+    mesh = Exodus(filename)
+    dataset_from = mesh.dataset
+
+    dataset_to = netCDF4.Dataset(
+        root + '_scaled' + ext, mode='w', format=dataset_from.data_model)
+
+    for attr in dataset_from.ncattrs():
+        dataset_to.setncattr(attr, dataset_from.getncattr(attr))
+
+    dimensions = dataset_from.dimensions
+    for k, v in dimensions.items():
+        dataset_to.createDimension(k, size=v.size)
+
+    variables = dataset_from.variables
+    for k, v in variables.items():
+        attrs = v.ncattrs()
+        fargs = {}
+        if '_FillValue' in attrs:
+            fargs['fill_value'] = v.getncattr('_FillValue')
+            attrs.remove('_FillValue')
+        variable = dataset_to.createVariable(
+            k, v.datatype, dimensions=v.dimensions, **fargs)
+        variable[:] = v[:]
+        for attr in attrs:
+            variable.setncattr(attr, v.getncattr(attr))
+
+    mesh.close()
+    dataset_to.variables['coordx'][:] *= scale[0]
+    dataset_to.variables['coordy'][:] *= scale[1]
+    dataset_to.variables['coordz'][:] *= scale[2]
+    dataset_to.close()
+
+
 # --------------------------------------------------------------------------- #
 
 if __name__ == '__main__':
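
To make the core step easy to see in isolation, here is a minimal, self-contained sketch (plain NumPy, independent of the Exodus API) of the midside-node insertion that convert_tet4_tet10 performs: each of the six edges of a tet4 element receives one new node at its midpoint, and an edge-keyed dictionary guarantees that an edge shared by neighboring elements reuses the same new node id. The helper name add_midside_nodes and the two-way edge lookup are illustrative choices here, not the package's code; node ids stay 1-based to match the Exodus convention.

import numpy as np

# Edge ordering assumed by the commit: midside node k (k = 5..10) of a tet10
# element sits on the edge between the corner nodes at IDXS_EDGES_4[k - 5].
IDXS_EDGES_4 = [[0, 1], [1, 2], [0, 2], [0, 3], [1, 3], [2, 3]]


def add_midside_nodes(coords, connectivity):
    """Sketch of tet4 -> tet10: return augmented coords and 10-node connectivity.

    coords: (num_nodes, 3) corner-node coordinates.
    connectivity: (num_elems, 4) 1-based corner-node ids.
    """
    coords = np.asarray(coords, dtype=float)
    connectivity = np.asarray(connectivity, dtype=int)
    num_nodes = len(coords)
    connectivity_to = np.zeros((len(connectivity), 10), dtype=int)
    connectivity_to[:, :4] = connectivity
    coords_to = list(coords)
    ids_edge = {}  # (node_a, node_b) -> id of the shared midside node
    for idx_elem, ids_node in enumerate(connectivity):
        for idx_pair, idxs in enumerate(IDXS_EDGES_4):
            pair = tuple(ids_node[idxs])
            id_new = ids_edge.get(pair) or ids_edge.get(pair[::-1])
            if id_new is None:
                # First time this edge is seen: create a node at its midpoint.
                id_new = num_nodes + len(ids_edge) + 1
                ids_edge[pair] = id_new
                coords_to.append(coords[[p - 1 for p in pair]].mean(axis=0))
            connectivity_to[idx_elem, 4 + idx_pair] = id_new
    return np.array(coords_to), connectivity_to


# Single reference tetrahedron: the four corners of the unit simplex.
coords4 = [[0., 0., 0.], [1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]
coords10, connect10 = add_midside_nodes(coords4, [[1, 2, 3, 4]])
assert connect10.shape == (1, 10) and len(coords10) == 10  # 4 corners + 6 midpoints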

tests/test_exodus_helper.py

Lines changed: 15 additions & 0 deletions

@@ -19,6 +19,7 @@
 from exodus_helper.dir_exodus import _dir_exodus_full
 import exodus_helper
 from exodus_helper.dir_exodus import _attr_inputs
+from exodus_helper.reconfigure_mesh import IDXS_EDGES_4
 
 
 # --------------------------------------------------------------------------- #
@@ -1820,6 +1821,20 @@ def test_calculate_volumes_block(dir_test_file):
 
 # Testing the reconfigure_mesh module --------------------------------------- #
 
+def test_convert_tet4_tet10(dir_test_file):
+    file_path = os.path.join(dir_test_file, 'test_convert_tet.g')
+    mesh_converted = exodus_helper.convert_tet4_tet10(file_path)
+    connectivity = mesh_converted.get_elem_connectivity_full()[:]
+    coords = np.stack(mesh_converted.get_coords()).T
+    for ids_node in connectivity:
+        for idx, edge in enumerate(IDXS_EDGES_4):
+            coords_0 = coords[ids_node[edge[0]] - 1]
+            coords_1 = coords[ids_node[edge[1]] - 1]
+            coords_2 = coords[ids_node[idx + 4] - 1]
+            assert np.allclose(0.5 * (coords_0 + coords_1), coords_2)
+    os.remove(os.path.join(dir_test_file, 'test_convert_tet_tet10.g'))
+
+
 def test_create_sets_canonical(dir_test_file):
     file_path = os.path.join(dir_test_file, 'test_create_sets_canonical.g')
     exodus_helper.create_sets_canonical(file_path)
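
As a usage note (file names here are placeholders, not from the commit): when filename_to is omitted, convert_tet4_tet10 derives the output name by inserting '_tet10' before the extension, which is why the test above removes test_convert_tet_tet10.g after running.

import exodus_helper

# Placeholder input path; with no filename_to argument the routine writes
# 'part_tet4_tet10.g' next to the input and returns the new Exodus object.
mesh_tet10 = exodus_helper.convert_tet4_tet10('part_tet4.g')
mesh_tet10.close()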
