6 | 6 |
7 | 7 | # --------------------------------------------------------------------------- #
8 | 8 |
| 9 | +from itertools import permutations
9 | 10 | import os
10 | 11 | import sys
11 | 12 |
18 | 19 |
19 | 20 | # --------------------------------------------------------------------------- #
20 | 21 |
21 | | -def scale_mesh(filename, scale=(1., 1., 1.)):
22 | | -    """Scale the dimensions of a mesh by multiplying the coordinate values of
23 | | -    the mesh by the scaling weights.
24 | | -
25 | | -    Args:
26 | | -        filename (str): Name of a .g file created in association with a mesh.
27 | | -        scale (tuple(float), optional): A list of scaling weights.
28 | | -            Defaults to (1., 1., 1.).
29 | | -
30 | | -    Raises:
31 | | -        AssertionError: The filename must correspond to an existing .g file.
32 | | -    """
33 | | -    assert os.path.isfile(filename)
34 | | -    root, ext = os.path.splitext(filename)
35 | | -
36 | | -    mesh = Exodus(filename)
37 | | -    dataset_from = mesh.dataset
38 | | -
39 | | -    dataset_to = netCDF4.Dataset(
40 | | -        root + '_scaled' + ext, mode='w', format=dataset_from.data_model)
41 | | -
42 | | -    for attr in dataset_from.ncattrs():
43 | | -        dataset_to.setncattr(attr, dataset_from.getncattr(attr))
| 22 | +IDXS_EDGES_4 = [[0, 1], [1, 2], [0, 2], [0, 3], [1, 3], [2, 3]]
44 | 23 |
45 | | -    dimensions = dataset_from.dimensions
46 | | -    for k, v in dimensions.items():
47 | | -        dataset_to.createDimension(k, size=v.size)
48 | 24 |
49 | | -    variables = dataset_from.variables
50 | | -    for k, v in variables.items():
51 | | -        attrs = v.ncattrs()
52 | | -        fargs = {}
53 | | -        if '_FillValue' in attrs:
54 | | -            fargs['fill_value'] = v.getncattr('_FillValue')
55 | | -            attrs.remove('_FillValue')
56 | | -        variable = dataset_to.createVariable(
57 | | -            k, v.datatype, dimensions=v.dimensions, **fargs)
58 | | -        variable[:] = v[:]
59 | | -        for attr in attrs:
60 | | -            variable.setncattr(attr, v.getncattr(attr))
| 25 | +# --------------------------------------------------------------------------- #
61 | 26 |
62 | | -    mesh.close()
63 | | -    dataset_to.variables['coordx'][:] *= scale[0]
64 | | -    dataset_to.variables['coordy'][:] *= scale[1]
65 | | -    dataset_to.variables['coordz'][:] *= scale[2]
66 | | -    dataset_to.close()
| 27 | +def convert_tet4_tet10(filename_from, filename_to=None, **kwargs):
| 28 | +
| 29 | +    mesh_from = Exodus(filename_from)
| 30 | +
| 31 | +    num_elements = mesh_from.get_num_elems()
| 32 | +    connectivity_to = np.zeros((num_elements, 10), dtype=int)
| 33 | +    connectivity_from = mesh_from.get_elem_connectivity_full()[:]
| 34 | +    connectivity_to[:, :4] = connectivity_from
| 35 | +
| 36 | +    coords_from = np.stack(mesh_from.get_coords()).T
| 37 | +    num_nodes_from = len(coords_from)
| 38 | +
| 39 | +    dims = mesh_from.dataset.dimensions
| 40 | +    num_ns = dims['num_ns_global'].size
| 41 | +    num_ss = dims['num_ss_global'].size
| 42 | +
| 43 | +    num_side_sss = [dims[f'num_side_ss{n}'].size for n in range(1, num_ss + 1)]
| 44 | +
| 45 | +    pairs_elem_all = {}
| 46 | +    coords_to = coords_from.tolist()
| 47 | +    for idx_elem, ids_node in enumerate(connectivity_from):
| 48 | +        pairs_elem = [tuple(ids_node[i]) for i in IDXS_EDGES_4]
| 49 | +        for idx_pair, pair in enumerate(pairs_elem):
| 50 | +            if pair in pairs_elem_all:
| 51 | +                id_node_new = pairs_elem_all[pair]
| 52 | +            else:
| 53 | +                id_node_new = num_nodes_from + len(pairs_elem_all) + 1
| 54 | +                pairs_elem_all[pair] = id_node_new
| 55 | +                idxs_old = [p - 1 for p in pair]
| 56 | +                coords_to.append(np.mean(coords_from[idxs_old, :], axis=0))
| 57 | +            connectivity_to[idx_elem, 4 + idx_pair] = id_node_new
| 58 | +
| 59 | +    ids_node_set = mesh_from.get_node_set_ids()
| 60 | +    sets_nodes = []
| 61 | +    for id_node_set in ids_node_set:
| 62 | +        ids_node = mesh_from.get_node_set_nodes(id_node_set).tolist()
| 63 | +        pairs = permutations(ids_node, 2)
| 64 | +        for pair in pairs:
| 65 | +            id_new = pairs_elem_all.get(pair, pairs_elem_all.get(pair[::-1]))
| 66 | +            if id_new not in ids_node and id_new is not None:
| 67 | +                ids_node.append(id_new)
| 68 | +        sets_nodes.append(ids_node)
| 69 | +
| 70 | +    num_nod_nss = [len(s) for s in sets_nodes]
| 71 | +
| 72 | +    ids_blk = mesh_from.get_elem_blk_ids()
| 73 | +    define_maps = True
| 74 | +    nums_elems = [mesh_from.get_num_elems_in_blk(i) for i in ids_blk]
| 75 | +    nums_nodes = [10 for i in ids_blk]
| 76 | +    nums_attrs = [0 for i in ids_blk]
| 77 | +    elem_types = ['TETRA10' for i in ids_blk]
| 78 | +
| 79 | +    dict_attrs, dict_dimensions, dict_variables = mesh_from._get_dicts_netcdf()
| 80 | +
| 81 | +    if filename_to is None:
| 82 | +        filename_to = '_tet10'.join(os.path.splitext(filename_from))
| 83 | +    mesh_to = Exodus(
| 84 | +        filename_to,
| 85 | +        mode='w',
| 86 | +        num_info=mesh_from.get_num_info_records() + 1,
| 87 | +        num_dim=mesh_from.numDim,
| 88 | +        num_nodes=len(coords_to),
| 89 | +        num_elem=num_elements,
| 90 | +        num_el_blk=len(ids_blk),
| 91 | +        num_node_sets=num_ns,
| 92 | +        num_side_sets=num_ss,
| 93 | +        num_nod_nss=num_nod_nss,
| 94 | +        num_side_sss=num_side_sss,
| 95 | +        title=kwargs.get('title', mesh_from.get_title()),
| 96 | +        **kwargs)
| 97 | +
| 98 | +    mesh_to.put_coords(*np.array(coords_to).T)
| 99 | +    mesh_to.put_concat_elem_blk(
| 100 | +        ids_blk, elem_types, nums_elems, nums_nodes, nums_attrs, define_maps)
| 101 | +    variables = mesh_to.dataset.variables
| 102 | +    for i in ids_blk:
| 103 | +        idxs_elem = mesh_from.get_idxs_elem_in_blk(i)
| 104 | +        variables[f'connect{i}'][:] = connectivity_to[idxs_elem]
| 105 | +
| 106 | +    ids_side_set = mesh_from.get_side_set_ids()
| 107 | +    for id_side_set in ids_side_set:
| 108 | +        ids_elem, ids_side = mesh_from.get_side_set(id_side_set)
| 109 | +        num_sides = mesh_from.get_num_faces_in_side_set(id_side_set)
| 110 | +        mesh_to.put_side_set_params(id_side_set, num_sides)
| 111 | +        mesh_to.put_side_set(id_side_set, ids_elem, ids_side)
| 112 | +
| 113 | +    for id_node_set, ids_node in zip(ids_node_set, sets_nodes):
| 114 | +        mesh_to.put_node_set_params(id_node_set, len(ids_node))
| 115 | +        mesh_to.put_node_set(id_node_set, ids_node)
| 116 | +
| 117 | +    return mesh_to
67 | 118 |
68 | 119 |
69 | 120 | def create_sets_canonical(filename):
@@ -208,6 +259,54 @@ def create_sets_canonical(filename):
208 | 259 |     dataset_to.close()
209 | 260 |
210 | 261 |
| 262 | +def scale_mesh(filename, scale=(1., 1., 1.)):
| 263 | +    """Scale the dimensions of a mesh by multiplying the coordinate values of
| 264 | +    the mesh by the scaling weights.
| 265 | +
| 266 | +    Args:
| 267 | +        filename (str): Name of a .g file created in association with a mesh.
| 268 | +        scale (tuple(float), optional): A list of scaling weights.
| 269 | +            Defaults to (1., 1., 1.).
| 270 | +
| 271 | +    Raises:
| 272 | +        AssertionError: The filename must correspond to an existing .g file.
| 273 | +    """
| 274 | +    assert os.path.isfile(filename)
| 275 | +    root, ext = os.path.splitext(filename)
| 276 | +
| 277 | +    mesh = Exodus(filename)
| 278 | +    dataset_from = mesh.dataset
| 279 | +
| 280 | +    dataset_to = netCDF4.Dataset(
| 281 | +        root + '_scaled' + ext, mode='w', format=dataset_from.data_model)
| 282 | +
| 283 | +    for attr in dataset_from.ncattrs():
| 284 | +        dataset_to.setncattr(attr, dataset_from.getncattr(attr))
| 285 | +
| 286 | +    dimensions = dataset_from.dimensions
| 287 | +    for k, v in dimensions.items():
| 288 | +        dataset_to.createDimension(k, size=v.size)
| 289 | +
| 290 | +    variables = dataset_from.variables
| 291 | +    for k, v in variables.items():
| 292 | +        attrs = v.ncattrs()
| 293 | +        fargs = {}
| 294 | +        if '_FillValue' in attrs:
| 295 | +            fargs['fill_value'] = v.getncattr('_FillValue')
| 296 | +            attrs.remove('_FillValue')
| 297 | +        variable = dataset_to.createVariable(
| 298 | +            k, v.datatype, dimensions=v.dimensions, **fargs)
| 299 | +        variable[:] = v[:]
| 300 | +        for attr in attrs:
| 301 | +            variable.setncattr(attr, v.getncattr(attr))
| 302 | +
| 303 | +    mesh.close()
| 304 | +    dataset_to.variables['coordx'][:] *= scale[0]
| 305 | +    dataset_to.variables['coordy'][:] *= scale[1]
| 306 | +    dataset_to.variables['coordz'][:] *= scale[2]
| 307 | +    dataset_to.close()
| 308 | +
| 309 | +
211 | 310 | # --------------------------------------------------------------------------- #
212 | 311 |
213 | 312 | if __name__ == '__main__':
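The new convert_tet4_tet10 helper reads a TETRA4 Exodus mesh, inserts one mid-edge node per unique element edge (IDXS_EDGES_4 lists the six edges of a tetrahedron by local node index), extends each node set with the mid-edge nodes whose endpoints both belong to it, copies the side sets, and writes the result as TETRA10 blocks, while scale_mesh is simply moved below create_sets_canonical. A minimal usage sketch, assuming the module is importable as mesh_utils and that cube.g is an existing TETRA4 mesh (both names are placeholders, not part of this commit):

# Hypothetical example; the module and file names are placeholders.
from mesh_utils import convert_tet4_tet10, scale_mesh

# Writes cube_tet10.g (the default '_tet10' suffix) with mid-edge nodes added.
mesh_to = convert_tet4_tet10('cube.g')
mesh_to.close()

# Writes cube_scaled.g with the x coordinates doubled and y, z unchanged.
scale_mesh('cube.g', scale=(2., 1., 1.))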