1"""
2Mesh API
3"""
4import logging
5import os
6import warnings
7from collections import Counter, defaultdict
8from io import BytesIO
9from itertools import chain, combinations
11import numpy as np
12import pandas as pd
14try:
15 import pyvista as pv
17 ISPYVISTA = True
18except ImportError:
19 ISPYVISTA = False
20import vg
22try:
23 import vtk
25 ISVTK = True
26except ImportError:
27 ISVTK = False
29from numtools.csyslib import Register as CSysReg
30from numtools.intzip import hzip
31from numtools.vgextended import angle as angle_0_pi
32from numtools.vgextended import loc_array
34import nastranio.cards as cards_mod
35from nastranio.constants import BULK, ELEMENT, PROPERTY, shapes
36from nastranio.decorators import cached_property, profile, timeit
37from nastranio.utils import array2dic, bunch, dic2array
39try:
40 import networkx as nx
42 HAS_NX = True
43except ImportError:
44 HAS_NX = False
46GMSH_STR_SEP = "#"
49def dedup(seq):
50 seen = set()
51 seen_add = seen.add
52 return [x for x in seq if not (x in seen or seen_add(x))]
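
# Quick sketch of `dedup` (order-preserving deduplication); plain Python,
# easy to verify by hand:
#
#     >>> dedup([3, 1, 3, 2, 1])
#     [3, 1, 2]
#
# Binding `seen_add = seen.add` avoids re-resolving the attribute on every
# iteration of the comprehension, a small micro-optimisation.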


class Mesh:
    """
    API to operate on a Registry's records
    """

    def _list_caches(self):
        return {k for k in self.__dict__ if k.startswith(cached_property.CACHED_PREFIX)}

    def clear_caches(self, rebuild=True):
        """delete internal caches, optionally rebuilding them afterwards"""
        prefix = cached_property.CACHED_PREFIX
        cache_names = self._list_caches()  # attribute names "_cache_XXX"
        cached_names = [k.replace(prefix, "") for k in cache_names]  # property names "XXX"
        logging.info("clean cached properties: %s", ", ".join(cached_names))
        for cache_name in cache_names:
            self.__dict__.pop(cache_name)
        # ---------------------------------------------------------------------
        # rebuilding: access each property again to re-trigger the calculation
        _rebuilt = []
        if rebuild:
            for fcached in cached_names:
                getattr(self, fcached)
                _rebuilt.append(fcached)
        if _rebuilt:
            logging.info("rebuilt cache: %s", ", ".join(_rebuilt))

    def set_registry(self, registry, calc_csys=False):
        """Bind a registry to the Mesh instance"""
        self.reg = registry
        self.CSys = None
        # build coordinate systems
        if calc_csys:
            self.calc_csys()

    def calc_csys(self):
        self.CSys = CSysReg(minid=0)
        # create CSYS0
        self.CSys.new(
            id=0,
            origin=(0, 0, 0),
            axis=[(1, 0, 0), (0, 1, 0), (0, 0, 1)],
            title="Reference Rectangular Coordinate System",
            labels=("X1", "X2", "X3"),
        )
        if "CORD2R" not in self.reg.container["bulk"]:
            # no CSys defined
            return
        arr = self.reg.container["bulk"]["CORD2R"].array
        # --------------------------------------------------------------------
        # sort CSYS by reference IDs to create CSYS in the correct order
        df = pd.DataFrame(arr).set_index(["RID", "CID"]).sort_index()

        titles = self.reg.comments().get("Coordinate System", {})
        for (rid, cid), points in df.iterrows():
            self.CSys.new(
                id=cid,
                reference_id=rid,
                points=(
                    (points["A1"], points["A2"], points["A3"]),
                    (points["B1"], points["B2"], points["B3"]),
                    (points["C1"], points["C2"], points["C3"]),
                ),
                title=titles.get(cid, "Rectangular Coordinate System"),
                labels=("X1", "X2", "X3"),
            )
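
    # Why the sort above matters (illustrative IDs, not from a real model):
    # a CORD2R with CID=10 defined in reference frame RID=5 can only be
    # resolved once CID=5 itself exists. Sorting by (RID, CID) registers
    # systems with lower reference IDs first, so each reference exists before
    # anything that points at it (assuming reference IDs are lower than the
    # IDs of the systems using them).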

    def cquad4_axis(self, digits=5):
        # ====================================================================
        # ⚠ NOTE ⚠
        # --------------------------------------------------------------------
        # WORK IN PROGRESS
        # --------------------------------------------------------------------
        # nic@alcazar -- Sunday 1 December 2019, 09:00:07 (UTC+0100)
        # mercurial: fbd21002395c+ tip
        # ====================================================================
        grids = self.reg.container[BULK.title]["GRID"]
        grids_df = grids.coords(asdf=True)
        cardobj = self.reg.CQUAD4
        eids = cardobj.eid2gids(asdf=True).stack()
        eids.index.names = ["EID", "GID"]
        eids.name = "gid"
        eids = eids.to_frame()
        df = (
            pd.merge(eids, grids_df, right_index=True, left_on="gid")
            .sort_index()
            .unstack(level=-1)[["X", "Y", "Z"]]
            .swaplevel(axis=1)
            .T.sort_index()
            .T[cardobj.gids_header]
        )

        _G1G2 = df["G2"].values - df["G1"].values
        _G1G3 = df["G3"].values - df["G1"].values

        # --------------------------------------------------------------------
        # calculate Z (normal)
        Z = np.round(vg.normalize(vg.cross(_G1G2, _G1G3)), digits)
        df[[("Zelem", "X"), ("Zelem", "Y"), ("Zelem", "Z")]] = pd.DataFrame(
            Z, index=df.index
        )
        # --------------------------------------------------------------------
        # calculate X
        _G2G4 = df["G4"].values - df["G2"].values
        df[[("_G2G4", "X"), ("_G2G4", "Y"), ("_G2G4", "Z")]] = pd.DataFrame(
            _G2G4, index=df.index
        )

        β = vg.angle(_G1G2, _G1G3)
        γ = vg.angle(_G2G4, -_G1G2)
        α = (β + γ) / 2
        df["α"] = α
        gps = df.groupby([("α", ""), ("Zelem", "X"), ("Zelem", "Y"), ("Zelem", "Z")])
        # "_gpid" identifies each processing group
        df["_gpid"] = gps.ngroup()
        Xs = {}
        for (angle, *axis), _df in gps:
            key = set(_df._gpid)
            assert len(key) == 1
            key = next(iter(key))
            Xs[key] = vg.normalize(
                vg.rotate(_df._G2G4.values, around_axis=np.array(axis), angle=angle)
            )
        # X = pd.DataFrame(
        #     vg.rotate(-df._G2G4.values, Z.values, df['α'].values), index=df.index
        # )
        # df[[('Xelem', 'X'), ('Xelem', 'Y'), ('Xelem', 'Z')]] = X
        return df, Xs

    # ========================================================================
    # coords
    # ========================================================================

    def coords(self, incsys=None, eids=None, gids=None, asdf=False):
        """
        Return coordinates of all or a subset of grid points.

        :param incsys: coordinate system in which to output the coordinates
        :type incsys: None or integer
        :param eids: element IDs providing a subset of grid point IDs
        :type eids: iterable of integers
        :param gids: iterable of grid point IDs to query coordinates for
        :type gids: iterable of integers
        :param asdf: "As DataFrame". If True, return a pandas DataFrame
        :type asdf: bool

        :returns: either a tuple (gids, coordinates, coordinate system IDs) or a pandas DataFrame.

        If both `eids` and `gids` are provided, `gids` is ignored.

        Proxy method to `loading.GRIDS.coords()`

        >>> gids, coords, csys = reg.mesh.coords()
        >>> gids
        array([    1,     2, ..., 12516, 12517])
        >>> coords
        array([[374.79303586, -47.79179422,  14.66539227],
               [374.793     , -46.965     ,  14.6654    ],
               ...,
               [372.45      , -42.28      ,   0.        ],
               [363.95      , -42.28      ,   0.        ]])
        >>> csys
        array([0, 0, 0, ..., 0, 0, 0])

        Providing `gids` to get selected coordinates:

        >>> gids, coords, csys = reg.mesh.coords(gids=((1,12517)))
        >>> gids
        array([    1, 12517])
        >>> coords
        array([[374.79303586, -47.79179422,  14.66539227],
               [363.95      , -42.28      ,   0.        ]])
        >>> csys
        array([0, 0])

        Providing `eids` to automatically select grid points, and requesting
        a pandas dataframe:

        >>> reg.mesh.coords(eids=((1,)), asdf=True)
             csys        X        Y        Z
        gid
        5       0  375.856 -47.4020  14.6654
        12      0  375.686 -47.6245  14.6654
        13      0  375.331 -47.3115  14.6654
        15      0  375.216 -47.5571  14.6654
        """
        gridcards = self.reg.container[BULK.title]["GRID"]
        if eids:
            gids = self.eid2gids(eids=eids, asbunch=True)
        if incsys is not None:
            if self.CSys is None:
                self.calc_csys()
            csysreg = self.CSys
        else:
            csysreg = None
        coords = gridcards.coords(incsys=incsys, csysreg=csysreg, gids=gids, asdf=asdf)
        return coords

    # ========================================================================
    # normals
    # ========================================================================

    def normals(self, eids=None, strict=False, digits=3, silent_warnings=True):
        """return normalized normals for 1D and 2D elements as a pandas DataFrame.
        Element IDs can optionally be provided to select a subset of elements.

        :param eids: IDs of the elements to calculate normals for
        :type eids: iterable of integers
        :param strict: whether or not to raise an exception if an invalid
            element ID is provided
        :type strict: bool
        :param digits: number of digits to keep after rounding
        :type digits: int
        :param silent_warnings: if True, silence warnings raised during processing
        :type silent_warnings: bool

        :returns: pandas DataFrame

        In the following examples, element #203 doesn't exist. By default, invalid
        elements are silently skipped.

        >>> reg.mesh.normals(eids=(1, 2, 203))
             X    Y    Z
        1  0.0  0.0 -1.0
        2 -0.0  0.0 -1.0
        >>> # Using `strict=True` will raise an exception:
        >>> reg.mesh.normals(eids=(1, 2, 203), strict=True)
        Traceback (most recent call last):
        ...
        ValueError: elements IDs {203} do not exist or do not have a normal.

        For 1D elements, the `normal` is defined as the GA-GB vector. This makes
        it handy to calculate the angle between a 1D element and a 2D element.

        In the following example, CBUSHes #11137 and #11138 have coincident points.

        >>> cbushes_id = reg.mesh.eids_by_cards(('CBUSH',))
        >>> cbushes_id
        {11137, 11138, ..., 11175, 11176}
        >>> reg.mesh.normals(cbushes_id)
                 X    Y    Z
        11137  NaN  NaN  NaN
        11138  NaN  NaN  NaN
        ...
        11175  0.0  0.0 -1.0
        11176  0.0  0.0 -1.0
        """
        if silent_warnings:
            warnings.simplefilter("ignore")
        nn = self._elems_normal.copy()
        nn["data"] = np.round(nn["data"], digits)
        df = pd.DataFrame(**nn)
        if eids:
            # check that the provided eids actually have a normal
            eids = set(eids)
            allowed = set(df.index)
            wrong_eids = eids - allowed
            if wrong_eids:
                msg = (
                    "elements IDs %s do not exist or do not have a normal."
                    % wrong_eids
                )
                if strict:
                    raise ValueError(msg)
                else:
                    logging.warning(msg)
                eids = eids & allowed
            df = df.loc[list(eids)]
        return df

    def geom_1d_elements(self, eids=None, cardnames=None):
        """
        >>> reg.mesh.geom_1d_elements().round(1)
                 GA     GB     XA    YA   ZA     XB    YB   ZB  GAGBx  GAGBy  GAGBz
        eid
        11137  7799  12516  372.4 -42.3  0.0  372.4 -42.3  0.0    0.0    0.0    0.0
        11138  7800  12517  364.0 -42.3  0.0  364.0 -42.3  0.0    0.0    0.0    0.0
        ...
        """
        coords = self.coords(incsys=0, asdf=True)[["X", "Y", "Z"]].copy()
        # select lineic elements
        arrays = []
        elems_cards = self.reg.summary["line"]  # 1D without RBE
        if not cardnames:
            cardobjs = self.reg.container[BULK.title]
        else:
            cardobjs = {
                cardname: cardobj
                for cardname, cardobj in self.reg.container[BULK.title].items()
                if cardname in cardnames
            }
        for cardname, cardobj in cardobjs.items():
            if cardname not in elems_cards:
                continue
            arrays.append(cardobj.array[[cardobj.XID_FIELDNAME] + cardobj.gids_header])

        array = np.hstack(arrays)
        df = pd.DataFrame(array).set_index("EID")  # drop cardname information
        df = df.merge(coords, left_on="GA", right_index=True).merge(
            coords, left_on="GB", right_index=True, suffixes=("A", "B")
        )
        df["GAGBx"] = df["XB"] - df["XA"]
        df["GAGBy"] = df["YB"] - df["YA"]
        df["GAGBz"] = df["ZB"] - df["ZA"]
        df.index.names = ["eid"]
        if eids:
            df = df.loc[eids]
        return df

    @cached_property
    def _geom_1d_elements_legacy(self):
        """return a DataFrame built from a 4-item tuple:
        eids np.array (n,), GAGB np.array (n, 3), GA coords (n, 3), GB coords (n, 3)
        """
        gids, coords, csys = self.coords(incsys=0)
        eids1d = []
        gids1d = []
        # select lineic elements
        elems_cards = self.reg.summary["line"]  # 1D without RBE
        for cardname, cardobj in self.reg.container[BULK.title].items():
            if cardname not in elems_cards:
                continue
            _eids1d = cardobj.eid2gids(keep_order=True)
            eids1d.append(list(_eids1d.keys()))
            gids1d.append(list(_eids1d.values()))
        # --------------------------------------------------------------------
        # convert per-card gids lists to arrays
        for i, _gids1d in enumerate(gids1d):
            gids1d[i] = np.array(_gids1d)
        if eids1d:
            # flatten per-card lists into single arrays
            eids1d = np.concatenate(eids1d)
            gids1d = np.concatenate(gids1d)

            # coordinates of nodes GA and GB for eids1d elements
            GA = coords[loc_array(gids, gids1d[:, 0])]
            GB = coords[loc_array(gids, gids1d[:, 1])]

            ret = {"index": eids1d, "data": np.hstack((GA, GB, GB - GA))}
        else:
            ret = {
                "index": [],
                "data": [],
            }
        ret["columns"] = [
            "GAx",
            "GAy",
            "GAz",
            "GBx",
            "GBy",
            "GBz",
            "GAGBx",
            "GAGBy",
            "GAGBz",
        ]

        df = pd.DataFrame(**ret)
        df.index.names = ["eid"]
        return df

    @cached_property
    def _elems_normal(self):
        """calculate and return 1D and 2D element normals"""
        gids, coords, csys = self.coords(incsys=0)
        # ====================================================================
        # 2D elements
        # ====================================================================
        eids2d = []
        gids2d = []
        # cquad4 / ctria3
        elems_cards = self.reg.summary["2d"]
        for cardname, cardobj in self.reg.container[BULK.title].items():
            if cardname not in elems_cards:
                continue
            _eids2d = cardobj.eid2gids(keep_order=True)
            eids2d.append(list(_eids2d.keys()))
            gids2d.append(list(_eids2d.values()))
        # --------------------------------------------------------------------
        # keep the first three nodes only (enough to calculate normals)
        for i, _gids2d in enumerate(gids2d):
            gids2d[i] = np.array(_gids2d)[:, :3]
        if eids2d:
            # flatten per-card lists into single arrays
            eids2d = np.concatenate(eids2d)
            gids2d = np.concatenate(gids2d)

            # coordinates of nodes G1, G2 and G3 for eids2d elements
            G1s = coords[loc_array(gids, gids2d[:, 0])]
            G2s = coords[loc_array(gids, gids2d[:, 1])]
            G3s = coords[loc_array(gids, gids2d[:, 2])]

            Z2d = vg.normalize(vg.cross((G2s - G1s), (G3s - G1s)))
        else:
            eids2d, Z2d = None, None
        # ====================================================================
        # 1D elements
        # ====================================================================
        eids1d = []
        gids1d = []
        # select lineic elements
        elems_cards = self.reg.summary.get("line", ())  # 1D without RBE
        for cardname, cardobj in self.reg.container[BULK.title].items():
            if cardname not in elems_cards:
                continue
            _eids1d = cardobj.eid2gids(keep_order=True)
            eids1d.append(list(_eids1d.keys()))
            gids1d.append(list(_eids1d.values()))
        # --------------------------------------------------------------------
        # keep the first two nodes only (enough to calculate the GA->GB axis)
        for i, _gids1d in enumerate(gids1d):
            gids1d[i] = np.array(_gids1d)[:, :2]
        if eids1d:
            # flatten per-card lists into single arrays
            eids1d = np.concatenate(eids1d)
            gids1d = np.concatenate(gids1d)

            # coordinates of nodes GA and GB for eids1d elements
            G1s = coords[loc_array(gids, gids1d[:, 0])]
            G2s = coords[loc_array(gids, gids1d[:, 1])]

            Z1d = vg.normalize(G2s - G1s)
        else:
            eids1d, Z1d = None, None
        # ====================================================================
        # merge 1D and 2D
        # ====================================================================
        if eids1d is not None and eids2d is not None:
            eids = np.hstack((eids1d, eids2d))
            Z = np.vstack((Z1d, Z2d))
        elif eids1d is not None:
            eids = eids1d
            Z = Z1d
        else:
            eids = eids2d
            Z = Z2d
        return {"index": eids, "data": Z, "columns": ["X", "Y", "Z"]}

    def get_cbars_y_vector(self):
        """return a DataFrame like:
              X1   X2   X3
        eid
        3    1.0  0.0  0.0
        4    1.0  0.0  0.0
        6    1.0  0.0  0.0
        ...
        """
        CBARS = self.reg.container["bulk"]["CBAR"]
        df = pd.DataFrame(CBARS.array)

        dfGO = df.dropna(subset=["GO"])
        if len(dfGO) > 0:
            raise NotImplementedError(
                "cannot calculate Y vector for elements defined with GO"
            )
        df = df[["EID", "X1", "X2", "X3"]].set_index("EID")
        df.index.names = ["eid"]
        return df

    # ========================================================================
    # area
    # ========================================================================
    @cached_property
    def _area_tria(self):
        """return the areas of triangular elements as a dict
        ready to be expanded into a pandas Series
        """
        # --------------------------------------------------------------------
        # Triangle areas are easy: ||G1G2 ^ G1G3|| / 2
        eids = sorted(list(self.eids_by_shape(shapes.TRIA)))
        if not eids:
            return {"index": [], "data": [], "name": "area"}
        gids, coords, csys = self.coords(incsys=0, eids=eids, asdf=False)
        eid2gids = self.eid2gids(eids=eids, keep_order=True)
        _eids, _gids = (
            np.array(list(eid2gids.keys())),
            np.array(list(eid2gids.values())),
        )
        G1s = coords[loc_array(gids, _gids[:, 0])]
        G2s = coords[loc_array(gids, _gids[:, 1])]
        G3s = coords[loc_array(gids, _gids[:, 2])]

        areas = np.linalg.norm(vg.cross((G2s - G1s), (G3s - G1s)), axis=1) / 2
        return {"index": _eids, "data": areas, "name": "area"}
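
    # Area sanity check (illustrative): for the right triangle G1=(0,0,0),
    # G2=(1,0,0), G3=(0,1,0), ||cross(G1G2, G1G3)|| / 2 = ||(0,0,1)|| / 2 = 0.5,
    # the expected area of half a unit square.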

    @cached_property
    def _area_quad(self):
        """return the areas of quadrilateral elements as a dict
        ready to be expanded into a pandas Series
        """
        # --------------------------------------------------------------------
        # a quad is split along its G1-G3 diagonal into two triangles
        eids = sorted(list(self.eids_by_shape(shapes.QUAD)))
        if not eids:
            return {"index": [], "data": [], "name": "area"}
        gids, coords, csys = self.coords(incsys=0, eids=eids, asdf=False)
        eid2gids = self.eid2gids(eids=eids, keep_order=True)
        _eids, _gids = (
            np.array(list(eid2gids.keys())),
            np.array(list(eid2gids.values())),
        )
        G1s = coords[loc_array(gids, _gids[:, 0])]
        G2s = coords[loc_array(gids, _gids[:, 1])]
        G3s = coords[loc_array(gids, _gids[:, 2])]
        G4s = coords[loc_array(gids, _gids[:, 3])]

        a1 = np.linalg.norm(vg.cross((G2s - G1s), (G3s - G1s)), axis=1) / 2
        a2 = np.linalg.norm(vg.cross((G3s - G1s), (G4s - G1s)), axis=1) / 2
        return {"index": _eids, "data": a1 + a2, "name": "area"}
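
    # Quad area check (illustrative): the unit square G1=(0,0,0), G2=(1,0,0),
    # G3=(1,1,0), G4=(0,1,0) splits along the G1-G3 diagonal into two
    # triangles of area 0.5 each, so a1 + a2 = 1.0. The diagonal split is
    # exact for planar quads and an approximation for warped ones.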

    def get_all_eids(self, cards=None):
        """return all element IDs as a set, optionally skipping the card
        names listed in `cards`"""
        eids = set()
        all_cards = self.reg.container["summary"]["element"].copy()
        if not cards:
            cards = set()
        searched_cards = all_cards - cards
        for cardname in searched_cards:
            card = self.reg.container["bulk"][cardname]
            eids |= set(card.carddata["main"]["EID"])
        return eids

    @cached_property
    def _area(self):
        tri = self._area_tria
        quad = self._area_quad
        # all other elements have a null (NaN) area
        all_eids = set(self.eid2gids().keys())
        null_ix = np.array(
            sorted(list(all_eids - (set(tri["index"]) | set(quad["index"]))))
        )
        null_values = np.full(len(null_ix), np.nan)

        return {
            "index": np.hstack((tri["index"], quad["index"], null_ix)),
            "data": np.hstack((tri["data"], quad["data"], null_values)),
            "name": "area",
        }

    def area(self, eids=None):
        """
        return the areas of optionally provided element IDs
        """
        areas = self._area.copy()
        if eids:
            eids = np.array(list(eids))
            ix = loc_array(areas["index"], eids)
            index = areas["index"][ix]
            data = areas["data"][ix]
            areas.update({"index": index, "data": data})
        return areas

    # ========================================================================
    # lengths
    # ========================================================================
    @cached_property
    def _length(self):
        """calculate lengths for 1D elements and mono-leg RBE*"""
        eids1d = []
        gids1d = []
        # --------------------------------------------------------------------
        # first, line elements (therefore excluding RBE*)
        _processed = self.reg.summary[shapes.LINE]
        cardnames = self.reg.summary[shapes.LINE]
        for cardname, cardobj in self.reg.container[BULK.title].items():
            if cardname not in cardnames:
                continue
            _eids1d = cardobj.eid2gids(keep_order=True)
            if not _eids1d:
                continue
            eids1d.append(list(_eids1d.keys()))
            gids1d.append(np.array(list(_eids1d.values())))
        # --------------------------------------------------------------------
        # mono-leg RBE*
        cardnames = self.reg.summary["1d"]
        cardnames = cardnames - _processed  # remove LINE elements
        for cardname, cardobj in self.reg.container[BULK.title].items():
            if cardname not in cardnames:
                continue
            _eids1d = {
                k: v
                for k, v in cardobj.eid2gids(keep_order=True).items()
                if len(v) == 2
            }  # mono-leg RBE*
            if not _eids1d:
                continue
            gids1d.append(np.array(list(_eids1d.values())))
            eids1d.append(list(_eids1d.keys()))
        gids1d = np.vstack(gids1d)
        eids1d = np.hstack(eids1d)
        # --------------------------------------------------------------------
        # get coordinates
        gids, coords, csys = self.coords(incsys=0)
        G1s = coords[loc_array(gids, gids1d[:, 0])]
        G2s = coords[loc_array(gids, gids1d[:, 1])]

        lengths = vg.magnitude(G2s - G1s)

        return {"index": eids1d, "data": lengths, "name": "length"}

    def length(self, eids=None):
        """
        Return lengths for the provided elements. If no elements are provided
        (``eids=None``), return the lengths of all 1D elements.

        Mono-leg RBE* are included.

        :param eids: *optional* element IDs to investigate.
        :type eids: sequence of integers

        :returns: lengths data dictionary ready to be expanded into a pandas Series

        >>> reg.mesh.length(eids=(11137, 11138))
        {'index': array([11137, 11138]), 'data': array([0., 0.]), 'name': 'length'}
        """
        lengths = self._length.copy()
        if eids:
            eids = np.array(list(eids))
            ix = loc_array(lengths["index"], eids)
            index = lengths["index"][ix]
            data = lengths["data"][ix]
            lengths.update({"index": index, "data": data})
        return lengths

    # ========================================================================
    # thicknesses
    # ========================================================================
    @cached_property
    def _thk(self):
        """return a pandas Series {eid: thk}"""
        # --------------------------------------------------------------------
        # get thicknesses defined at element level
        elems_cards = self.reg.summary["2d"]
        ix = []
        data = []
        for cardname, cardobj in self.reg.container[BULK.title].items():
            if cardname not in elems_cards:
                continue
            _data = cardobj.thk
            ix.append(_data["index"])
            data.append(_data["data"])
        if not data:
            data = {"data": [], "index": [], "name": "elem_thk"}
        else:
            # prepare a series to make the merge easier
            data = {"data": np.hstack(data), "index": np.hstack(ix), "name": "elem_thk"}
        eid2thk = pd.Series(**data, dtype="float64")
        # --------------------------------------------------------------------
        # get thicknesses defined by properties
        pid2thk = pd.Series(self.pid2thk(), name="prop_thk", dtype="float64")
        eid2pid = pd.Series(self.eid2pid(), name="pid")
        thks = pd.merge(eid2pid, pid2thk, left_on="pid", right_index=True, how="left")
        thks = thks.merge(eid2thk, left_index=True, right_index=True, how="left")
        return thks[["prop_thk", "elem_thk"]].max(axis=1).sort_index()
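
    # Merge rule illustrated: an element falls back on its property thickness
    # when the element card defines none; `max(axis=1)` with pandas' default
    # NaN-skipping keeps whichever of (prop_thk, elem_thk) is defined, and the
    # larger of the two when both are.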

    def thk(self, eids=None, asdict=True):
        """return a mapping {eid: thk}

        >>> reg.mesh.thk(eids=(24, 27, 4, 35, 36, 37, 33, -1))
        {24: 0.09, 27: 0.09, 4: 0.09, 35: 0.09, 36: 0.09, 37: 0.09, 33: 0.09, -1: nan}
        """
        _thk = self._thk.copy()
        if eids:
            _thk = _thk.reindex(list(eids))
        if asdict:
            _thk = _thk.to_dict()
        return _thk

    # ========================================================================
    # gid2XXX
    # ========================================================================

    @cached_property
    def _gid2eids(self):
        """
        first part of gid2eids. Split off to cache intermediate results
        """
        _gid2eids = defaultdict(set)
        for eid, _gids in self.eid2gids().items():
            for gid in _gids:
                _gid2eids[gid].add(eid)

        return {gid: frozenset(eids) for gid, eids in _gid2eids.items()}

    def gid2eids(self, gids=None, asbunch=False):
        """
        return a dictionary with all gids as keys mapped to the associated set of eids.

        :param gids: optional subset of grid point IDs to check.
        :type gids: iterable of integers.

        :returns: dictionary

        >>> reg.mesh.gid2eids()
        {13: frozenset({8, 1, 6, 7}), 12: frozenset({1, 2, 4, 9}), ...}
        >>> # A set of grid points can optionally be provided:
        >>> reg.mesh.gid2eids(gids=(1, 17))
        {1: frozenset({8, 9, 12}), 17: frozenset({10, 14})}
        >>> reg.mesh.gid2eids(gids=(1, 17), asbunch=True)
        frozenset({8, 9, 10, 12, 14})
        """
        _gid2eids = self._gid2eids.copy()
        if gids:
            _gid2eids = {gid: _gid2eids[gid] for gid in gids}
        if asbunch:
            return bunch(_gid2eids)
        return _gid2eids

    def next_unused_gid(self):
        """return the next free (unused) node ID, without using caches"""
        all_gids = self.reg.container["bulk"]["GRID"].carddata["main"]["ID"]
        return max(all_gids) + 1

    def next_unused_eid(self):
        """return the next free (unused) element ID, without using caches"""
        cards = self.reg.container["summary"]["element"]
        all_eids = set()
        for cardname in cards:
            card = self.reg.container["bulk"][cardname]
            all_eids |= set(card.carddata["main"]["EID"])
        return max(all_eids) + 1

    # ========================================================================
    # card2XXX
    # ========================================================================
    @cached_property
    def _card2eids(self):
        ret = defaultdict(set)
        # to make docstrings more reliable, sort keys
        cards = sorted(list(self.reg.summary["element"]))
        for cardname in cards:
            card = self.reg.bulk[cardname]
            ret[cardname] = set(card.carddata["main"][card.EID_FIELDNAME])
        return dict(ret)

    def card2eids(self, cards=None):
        """
        return a dict mapping cardname to eids

        >>> reg.mesh.card2eids()
        {'CBUSH': {11137, ...}, 'CQUAD4': {1, ...}, 'CTRIA3': {5121, ...}, ...}
        >>> reg.mesh.card2eids(cards=('CQUAD4', 'CTRIA3'))
        {'CQUAD4': {1, ...}, 'CTRIA3': {5121, ...}}
        """
        card2eids = self._card2eids.copy()
        if not cards:
            return card2eids
        return {k: v for k, v in card2eids.items() if k in cards}

    # ========================================================================
    # eid2XXX
    # ========================================================================

    @cached_property
    def _eid2gids_ordered(self):
        """
        first part of eid2gids(keep_order=True).
        Split off to cache intermediate results"""
        _eid2gids = {}
        elems_cards = self.reg.summary["element"]
        for cardname, cardobj in self.reg.container[BULK.title].items():
            if cardname not in elems_cards:
                continue
            _eid2gids.update(cardobj.eid2gids(keep_order=True))
        return dict(_eid2gids)

    @cached_property
    def _eid2gids(self):
        """
        first part of eid2gids(keep_order=False, asbunch=False).
        Split off to cache intermediate results"""
        _eid2gids = {}
        elems_cards = self.reg.summary["element"]
        for cardname, cardobj in self.reg.container[BULK.title].items():
            if cardname not in elems_cards:
                continue
            _eid2gids.update(cardobj.eid2gids(keep_order=False))
        return dict(_eid2gids)

    @cached_property
    def _eid2gids_asbunch(self):
        """
        first part of eid2gids(keep_order=False, asbunch=True).
        Split off to cache intermediate results"""
        _gids = {}
        elems_cards = self.reg.summary["element"]
        for cardname, cardobj in self.reg.container[BULK.title].items():
            if cardname not in elems_cards:
                continue
            _gids |= cardobj.eid2gids(keep_order=False)  # dict-union (Python >= 3.9)
        return _gids

    def eid2data(self, eids=None):
        """return a pandas DataFrame with misc data, sorted by `eid`:

        >>> reg.mesh.eid2data()
                 card dim   pid  length      area   thk    volume  const
        eid
        1      CQUAD4  2d     1     NaN  0.127690  0.09  0.011492  False
        2      CQUAD4  2d     1     NaN  0.176163  0.09  0.015855  False
        3      CQUAD4  2d     1     NaN  0.248014  0.09  0.022321  False
        4      CQUAD4  2d     1     NaN  0.130147  0.09  0.011713  False
        5      CQUAD4  2d     1     NaN  0.330996  0.09  0.029790  False
        ...       ...  ..   ...     ...       ...   ...       ...    ...
        12845    RBE2  1d  None     0.0       NaN   NaN       NaN  False
        12846    RBE2  1d  None     0.0       NaN   NaN       NaN  False
        12847    RBE2  1d  None     0.0       NaN   NaN       NaN  False
        12986    RBE3  1d  None     NaN       NaN   NaN       NaN  False
        12987    RBE3  1d  None     NaN       NaN   NaN       NaN  False
        <BLANKLINE>
        [4246 rows x 8 columns]
        """
        eid2data = self._eid2data.copy()
        if eids:
            eid2data = eid2data.loc[list(eids)]
        return eid2data

    @cached_property
    def _eid2data(self):
        """return a pandas DataFrame with misc data, sorted by `eid`:

        >>> reg.mesh.eid2data
                 card dim   pid
        eid
        1      CQUAD4  2d     1
        2      CQUAD4  2d     1
        ...       ...  ..   ...
        12986    RBE3  1d  None
        12987    RBE3  1d  None
        <BLANKLINE>
        [4246 rows x 3 columns]
        """
        ret = []
        cards = self.reg.summary[ELEMENT]
        for cardname in cards:
            card = self.reg.bulk[cardname]
            _carddata = dict(
                eid=card.array[card.EID_FIELDNAME].tolist(),
                card=[cardname] * len(card),
                dim=[card.dim] * len(card),
                # gmsh_eltype=[card.gmsh_eltype] * len(card),
                # shape=[card.shape] * len(card)
            )
            # pids are not just a repetition
            if hasattr(card, "PID_FIELDNAME"):
                pid = card.array[card.PID_FIELDNAME].tolist()
            else:
                pid = [None] * len(card)
            _carddata["pid"] = pid
            _ret = pd.DataFrame(_carddata).set_index("eid")
            ret.append(_ret)
        df = pd.concat(ret).sort_index()
        # --------------------------------------------------------------------
        # areas and lengths
        df["length"] = pd.Series(**self.length())
        df["area"] = pd.Series(**self.area())
        try:
            df["thk"] = pd.Series(self.thk())
        except Exception:
            df["thk"] = 0.0
        df["volume"] = df.area * df.thk
        # --------------------------------------------------------------------
        # boundaries
        gid2eids = self.gid2eids(self.boundaries.index.tolist())
        const_eids = [eid for eid in chain.from_iterable(gid2eids.values())]
        df["const"] = False
        df.loc[const_eids, "const"] = True
        return df

    def eid2gids(
        self, eids=None, dim=None, cards=None, keep_order=False, asbunch=False
    ):
        """
        return a dictionary with all eids as keys mapped to the associated set of gids
        """
        # --------------------------------------------------------------------
        # define which cached property to use
        if asbunch:
            if keep_order:
                raise ValueError("`keep_order` is incompatible with `asbunch`")
            else:
                _eid2gids = self._eid2gids  # conversion to set will occur later on
        else:
            if keep_order:
                _eid2gids = self._eid2gids_ordered.copy()
            else:
                _eid2gids = self._eid2gids.copy()
        # --------------------------------------------------------------------
        # pre-select element IDs
        if dim:
            eids = self.eids_by_dim(dim)
        elif cards:
            eids = self.eids_by_cards(cards)
        # --------------------------------------------------------------------
        # filter and return
        if eids:
            _eid2gids = {eid: _eid2gids.get(eid) for eid in eids}
        if asbunch:
            return bunch(_eid2gids)
        return _eid2gids

    def get_eid_cardname(self, eid):
        """non-cached cardname search"""
        for cardname in self.reg.summary[ELEMENT]:
            card = self.reg.bulk[cardname]
            eids = card.carddata["main"][card.EID_FIELDNAME]
            if eid in eids:
                return cardname

    @cached_property
    def _eid2card(self):
        ret = {}
        cards = sorted(list(self.reg.summary[ELEMENT]))
        for cardname in cards:
            card = self.reg.bulk[cardname]
            eids = card.carddata["main"][card.EID_FIELDNAME]
            ret.update(dict(zip(eids, [cardname] * len(eids))))
        return ret

    def eid2card(self, eids=None, cards=None, asbunch=False, skipcache=False):
        """
        return a mapping {eid: cardname}

        :param eids: element IDs to filter
        :type eids: iterable of integers
        :param cards: cards to filter
        :type cards: iterable of valid card names

        :returns: dictionary mapping element IDs to cardnames

        >>> reg.mesh.eid2card()
        {11137: 'CBUSH', ..., 3688: 'CTRIA3', ...}
        >>> reg.mesh.eid2card(eids=(11137, 11139, 3689))
        {11137: 'CBUSH', 11139: 'CBUSH', 3689: 'CQUAD4'}
        >>> s = reg.mesh.eid2card(eids=(11137, 11139, 3689), asbunch=True)
        >>> s == frozenset({'CBUSH', 'CQUAD4'})
        True
        >>> reg.mesh.eid2card(eids=(11137, 11139, 3689), cards=('CQUAD4', 'CTRIA3'))
        {3689: 'CQUAD4'}
        """
        if skipcache:
            _d = {}
            _cards = sorted(list(self.reg.summary[ELEMENT]))
            for _cardname in _cards:
                _card = self.reg.bulk[_cardname]
                _eids = _card.carddata["main"][_card.EID_FIELDNAME]
                _d.update(dict(zip(_eids, [_cardname] * len(_eids))))
        else:
            _d = self._eid2card.copy()
        if eids:
            _d = {eid: card for eid, card in _d.items() if eid in eids}
        if cards:
            _d = {eid: card for eid, card in _d.items() if card in cards}
        if asbunch:
            return frozenset(_d.values())
        return _d

    @cached_property
    def _eid2dim(self):
        ret = {}
        cards = sorted(list(self.reg.summary[ELEMENT]))
        for cardname in cards:
            card = self.reg.bulk[cardname]
            eids = card.carddata["main"][card.EID_FIELDNAME]
            ret.update(dict(zip(eids, [card.dim] * len(eids))))
        return ret

    def eid2dim(self, eids=None, dims=None, asbunch=False):
        """
        return a mapping {eid: dim}

        :param eids: element IDs to filter
        :type eids: iterable of integers
        :param dims: dimensions to filter (e.g. '1d', '2d', '0d', ...)
        :type dims: iterable of valid dimension names

        :returns: dictionary mapping element IDs to dimensions

        >>> reg.mesh.eid2dim()
        {11137: '1d', ..., 3688: '2d', ...}
        >>> reg.mesh.eid2dim(eids=(11137, 11139, 3689))
        {11137: '1d', 11139: '1d', 3689: '2d'}
        >>> s = reg.mesh.eid2dim(eids=(11137, 11139, 3689), asbunch=True)
        >>> s == frozenset({'1d', '2d'})
        True
        >>> reg.mesh.eid2dim(eids=(11137, 11139, 3689), dims=('1d', '0d', '3d'))
        {11137: '1d', 11139: '1d'}
        """
        _d = self._eid2dim.copy()
        if eids:
            _d = {eid: dim for eid, dim in _d.items() if eid in eids}
        if dims:
            _d = {eid: dim for eid, dim in _d.items() if dim in dims}
        if asbunch:
            return frozenset(_d.values())
        return _d

    @cached_property
    def _eid2shape(self):
        ret = {}
        cards = sorted(list(self.reg.summary[ELEMENT]))
        for cardname in cards:
            card = self.reg.bulk[cardname]
            eids = card.carddata["main"][card.EID_FIELDNAME]
            ret.update(dict(zip(eids, [card.shape] * len(eids))))
        return ret

    def eid2shape(self, eids=None, shapes=None, asbunch=False):
        """
        return a mapping {eid: shape}

        :param eids: element IDs to filter
        :type eids: iterable of integers
        :param shapes: shapes to filter (e.g. 'line', 'triangle', 'quad', ...)
        :type shapes: iterable of valid shape names

        :returns: dictionary mapping element IDs to shapes

        >>> reg.mesh.eid2shape()
        {11137: 'line', ..., 3688: 'triangle', ...}
        >>> reg.mesh.eid2shape(eids=(11137, 11139, 3689))
        {11137: 'line', 11139: 'line', 3689: 'quad'}
        >>> reg.mesh.eid2shape(eids=(11137, 11139, 3689), shapes=('triangle', 'quad', '3d'))
        {3689: 'quad'}
        >>> s = reg.mesh.eid2shape(eids=(11137, 11139, 3689), asbunch=True)
        >>> s == frozenset({'line', 'quad'})
        True
        """
        _d = self._eid2shape.copy()
        if eids:
            _d = {eid: shape for eid, shape in _d.items() if eid in eids}
        if shapes:
            _d = {eid: shape for eid, shape in _d.items() if shape in shapes}
        if asbunch:
            return frozenset(_d.values())
        return _d

    @cached_property
    def _eid2pid(self):
        """
        return a mapping {eid: pid}
        """
        ret = {}
        cards = self.reg.summary[ELEMENT]
        for cardname in cards:
            card = self.reg.bulk[cardname]
            if not hasattr(card, "PID_FIELDNAME"):
                logging.info(f"{cardname} has no PID_FIELDNAME attribute; skipping")
                continue
            eids = card.carddata["main"][card.EID_FIELDNAME]
            pids = card.carddata["main"][card.PID_FIELDNAME]
            ret.update(dict(zip(eids, pids)))
        return ret

    def eid2pid(self, eids=None, asbunch=False):
        """
        return a mapping {eid: pid}
        """
        eid2pid = self._eid2pid.copy()
        if eids:
            eid2pid = {eid: pid for eid, pid in eid2pid.items() if eid in eids}
        if asbunch:
            return frozenset(eid2pid.values())
        return eid2pid

    @cached_property
    def _pid2mids(self):
        pid2mids = {}
        for pname in self.reg.summary[PROPERTY]:
            prop = self.reg.bulk[pname]
            pid2mids.update(prop.pid2mids)
        return pid2mids

    def pid2mids(self, eids=None, pids=None, asbunch=False):
        """
        Return a dictionary mapping PID to material IDs (MIDs). If ``asbunch`` is
        ``True``, a single set of material IDs is returned.

        :param eids: *optional* restrict PIDs to the elements' property IDs
        :param pids: *optional* restrict PIDs to the provided property IDs
        :param asbunch: should the MIDs be broken down by PID (``False``) or not
            (``True``)
        :type asbunch: bool

        :returns: ``dict`` or ``frozenset``

        >>> dic = reg.mesh.pid2mids()
        >>> dic == {1: frozenset({1}), 2: frozenset({1}), 4: frozenset({2, 3}),
        ...         5: frozenset({2, 4}), 6: frozenset({2, 4}), 7: frozenset({1, 5}),
        ...         8: frozenset(), 9: frozenset(), 10: frozenset()}
        True
        >>> reg.mesh.pid2mids(pids=(4,5), asbunch=True)
        frozenset({2, 3, 4})
        """
        pid2mids = self._pid2mids.copy()
        if eids:
            pids = self.eid2pid(eids=eids, asbunch=True)
        if pids:
            pid2mids = {pid: mids for pid, mids in pid2mids.items() if pid in pids}
        if asbunch:
            return bunch(pid2mids)
        return pid2mids

    @cached_property
    def _eid2mids(self):
        """
        return a mapping {eid: mids}
        """
        eid2pid = self._eid2pid.copy()
        return {eid: self._pid2mids[pid] for eid, pid in eid2pid.items()}

    def eid2mids(self, eids=None, asbunch=False):
        """
        Return a mapping {eid: mids}.

        :param eids: *optional* element IDs to investigate.
        :type eids: iterable of integers.

        :returns: ``dict`` (if ``asbunch`` is ``False``) or ``set`` (if ``asbunch`` is
            ``True``)

        >>> dic = reg.mesh.eid2mids(eids=(1, 5343))
        >>> dic == {5343: frozenset({2, 4}), 1: frozenset({1})}
        True
        """
        eid2mids = self._eid2mids.copy()
        if eids:
            eid2mids = {eid: mids for eid, mids in eid2mids.items() if eid in eids}
        if asbunch:
            return bunch(eid2mids)
        return eid2mids

    @cached_property
    def _eid2pcard(self):
        """
        return a mapping {EID: PCARD},
        e.g. {1: 'PCOMP', 2: 'PBUSH', 3: 'PCOMP'}
        """
        eid2pcard = {eid: self.pid2pcard()[pid] for eid, pid in self._eid2pid.items()}
        return eid2pcard

    def eid2pcard(self, eids=None, asbunch=False):
        """
        return a mapping {EID: PCARD},
        e.g. {1: 'PCOMP', 2: 'PBUSH', 3: 'PCOMP'}
        """
        eid2pcard = self._eid2pcard.copy()
        if eids:
            eid2pcard = {eid: pcard for eid, pcard in eid2pcard.items() if eid in eids}
        if asbunch:
            return frozenset(eid2pcard.values())
        return eid2pcard

    # ========================================================================
    # eids_by_XXX
    # ========================================================================
    def eids_by_cards(self, cards=None):
        """
        return a unique set of eids defined by `cards` (any iterable).

        :param cards: cards defining the elements to search
        :type cards: iterable of valid element card names

        :returns: a set of element IDs

        >>> reg.mesh.eids_by_cards(cards=('CQUAD4', 'CBUSH'))
        {1, 2, 3, 4, ...}
        """
        if not cards:
            cards = self.reg.summary["element"]
        cards = set(cards)

        eids = set()
        for cardname in cards:
            card = self.reg.bulk[cardname]
            eids |= set(card.carddata["main"][card.EID_FIELDNAME])
        return eids

    def eids_by_dim(self, dim):
        """
        return a unique set of eids with the provided dim. `dim` shall be one
        of {'0d', '1d', '2d', '3d'}.

        :param dim: dimension to search
        :type dim: string

        >>> reg.mesh.eids_by_dim(dim='0d')
        {11217, 11220}
        """
        assert dim in ("0d", "1d", "2d", "3d")
        cards = self.reg.summary.get(dim)
        if not cards:
            return set()
        return self.eids_by_cards(cards)

    def eids_by_shape(self, shape):
        """
        Return a unique set of eids with the provided shape.

        >>> reg.mesh.eids_by_shape(shape='line')
        {11137, 11138, ..., 11175, 11176}

        ``shape`` shall be one of ``nastranio.constants.shapes``:

        >>> from nastranio.constants import shapes
        >>> shapes
        SHAPE(VERTICE='point', LINE='line', TRIA='triangle', QUAD='quad', MPC='mpc')
        >>> shapes.TRIA
        'triangle'
        """
        if shape not in shapes:
            raise ValueError(f"{shape} must be one of {shapes}")
        cards = self.reg.summary.get(shape)
        if not cards:
            return set()
        return self.eids_by_cards(cards)

    @cached_property
    def _pid2pcard(self):
        """
        return a dictionary mapping property ID <PID> to cardname <PCARD> {PID: PCARD}
        """
        pid2pcard = {}
        pcards = self.reg.summary[PROPERTY]
        for pcardname in pcards:
            pcard = self.reg.bulk[pcardname]
            pids = pcard.carddata["main"][pcard.PID_FIELDNAME]
            pid2pcard.update(dict(zip(pids, len(pids) * [pcardname])))
        return pid2pcard

    def pid2pcard(self):
        """
        return a dictionary mapping property ID <PID> to cardname <PCARD> {PID: PCARD}

        >>> dic = reg.mesh.pid2pcard()
        >>> dic == {4: 'PCOMP', 5: 'PCOMP', 6: 'PCOMP', 7: 'PCOMP',
        ...         8: 'PBUSH', 9: 'PBUSH', 10: 'PBUSH', 1: 'PSHELL', 2: 'PSHELL'}
        True
        """
        return self._pid2pcard

    @cached_property
    def _pid2eids(self):
        """return a dictionary mapping PID to the set of concerned elements"""
        _pid2eids = defaultdict(set)
        for eid, pid in self.eid2pid().items():
            _pid2eids[pid].add(eid)
        return {pid: frozenset(eids) for pid, eids in _pid2eids.items()}

    def pid2eids(self, pids=None):
        if pids is None:
            return self._pid2eids
        return {pid: eids for pid, eids in self._pid2eids.items() if pid in pids}

    def pid2bbox(self):
        """return a dict mapping pid to a 6-item tuple
        (xmin, ymin, zmin, xmax, ymax, zmax)"""
        pid2bbox = {}
        allgrids = self.coords(incsys=0, asdf=True)[["X", "Y", "Z"]]
        for pid, gids in self.pid2gids().items():
            tokens = []
            grids = allgrids.loc[gids]
            tokens += grids.min().to_list()
            tokens += grids.max().to_list()
            pid2bbox[pid] = tuple(tokens)
        return pid2bbox

    def get_eid_bbox(self, eid):
        """non-cached bbox calculation for a single element. This hits the caches:
        * grid array (via query_id_fast)
        * eid2gids
        """
        grids = self.reg.container["bulk"]["GRID"]
        card = self.reg.container["bulk"][self.get_eid_cardname(eid)]
        _ = card.query_id_fast(eid, columns=card.gids_header)
        # collect every node of the element, whatever its shape
        gids = tuple(_[header] for header in card.gids_header)
        ar2 = grids.query_id_fast(gids, columns=("X1", "X2", "X3"))
        xyz2 = np.array(list(ar2.values())).T
        res2 = np.hstack((np.min(xyz2, axis=0), np.max(xyz2, axis=0)))
        return res2

    @cached_property
    def _eid2bbox(self):
        grids = pd.DataFrame(
            self.reg.container["bulk"]["GRID"].array[["ID", "X1", "X2", "X3"]]
        )
        elements = pd.Series(self.reg.mesh.eid2gids(), name="gid")
        df = elements.explode().to_frame().set_index("gid", append=True)
        df.index.names = ["eid", "gid"]
        df.reset_index(level=0, inplace=True)
        df = df.merge(grids, left_index=True, right_on="ID")
        df.index.names = ["gid"]
        df = df.reset_index().set_index("eid").sort_index().reset_index()
        df = df[["eid", "X1", "X2", "X3"]]
        bboxes = df.groupby("eid")
        mins = bboxes.min()
        maxs = bboxes.max()
        df = pd.merge(
            mins, maxs, left_index=True, right_index=True, suffixes=("_min", "_max")
        )
        return df

    def eid2bbox(self):
        return self._eid2bbox
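
    # Shape of the result (illustrative): one row per eid with columns
    # X1_min, X2_min, X3_min, X1_max, X2_max, X3_max, so an element spanning
    # nodes (0, 0, 0) and (1, 2, 3) gets the row (0, 0, 0, 1, 2, 3).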

    @cached_property
    def _pid2gids(self):
        _pid2gids = {}
        for pid, eids in self.pid2eids().items():
            gids = self.eid2gids(eids=eids, asbunch=True)
            _pid2gids[pid] = gids
        return _pid2gids

    def pid2gids(self, pids=None):
        if pids is None:
            return self._pid2gids
        return {pid: gids for pid, gids in self._pid2gids.items() if pid in pids}

    @cached_property
    def _gid2pids(self):
        ret = defaultdict(set)
        for pid, gids in self.pid2gids().items():
            for gid in gids:
                ret[gid].add(pid)
        return {gid: frozenset(pids) for gid, pids in ret.items()}

    def gid2pids(self, gids=None):
        if gids is None:
            return self._gid2pids.copy()
        return {gid: pids for gid, pids in self._gid2pids.items() if gid in gids}

    @cached_property
    def _pid2thk(self):
        """return a mapping {pid: thk}"""
        pid2thk = {}
        pcards = self.reg.summary[PROPERTY]
        for cardname, cardobj in self.reg.container[BULK.title].items():
            if cardname not in pcards or not hasattr(cardobj, "thk"):
                continue
            thks = pd.Series(**cardobj.thk).round(12).to_dict()
            pid2thk.update(thks)
        return pid2thk

    def pid2thk(self, pids=None):
        """return a mapping {pid: thk}

        >>> reg.mesh.pid2thk()
        {1: 0.09, 2: 0.126, 4: 0.5, 5: 0.375, 6: 0.5, 7: 0.75}
        >>> reg.mesh.pid2thk(pids=(1, 4, -1))
        {1: 0.09, 4: 0.5}
        """
        pid2thk = self._pid2thk
        if pids:
            return {k: v for k, v in pid2thk.items() if k in pids}
        return pid2thk

    def meetic(
        self,
        eids=None,
        gids=None,
        pids=None,
        cards=None,
        samepid=None,
        samecard=None,
        anglemax=None,
        min_paths=1,
        debug_eid=None,
        **kwargs,
    ):
        """filter the meetic DataFrame based on multiple criteria"""
        m = self._meetic.copy()

        def debug():
            if not debug_eid:
                return
            md = m[(m.eid1 == debug_eid) | (m.eid2 == debug_eid)]
            mc = md[["eid1", "eid2", "gid"]].groupby(["eid1", "eid2"]).count()
            print("nb possibilities: %s" % len(md))
            print(mc)
            print("--------------------")

        # --------------------------------------------------------------------
        # build query
        queries = []
        if eids:
            queries.append("(eid1 in @eids | eid2 in @eids)")
        if gids:
            queries.append("(gid in @gids)")
        if pids:
            queries.append("(pid1 in @pids | pid2 in @pids)")
        if cards:
            queries.append("(card1 in @cards | card2 in @cards)")
        if anglemax:
            queries.append("(angle <= @anglemax)")
        if samecard is not None:
            assert isinstance(samecard, bool)
            queries.append("(same_card == @samecard)")
        if samepid is not None:
            assert isinstance(samepid, bool)
            queries.append("(same_pid == @samepid)")
        for col, crit in kwargs.items():
            queries.append(f'({col} == "{crit}")')
        query = " & ".join(queries)
        if query:
            logging.debug(f'query meetic with "{query}"')
            m = m.query(query)
        if min_paths > 1:
            mg = m[["eid1", "eid2", "gid"]].groupby(["eid1", "eid2"]).count()
            mg = mg[mg.gid >= min_paths]
            m = m.set_index(["eid1", "eid2"]).loc[mg.index]
            m = m.reset_index().set_index(["gid", "pathid"]).sort_index()
        return m
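
    # Query assembly illustrated (hypothetical values): with eids=(1, 2) and
    # anglemax=5.0 the assembled string is
    #     "(eid1 in @eids | eid2 in @eids) & (angle <= @anglemax)"
    # and pandas.DataFrame.query resolves the @-prefixed names against the
    # local variables of this method.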

    def autogroup(self, meeticmod):
        """
        create groups of elements based on the connectivity described by `meeticmod`
        """
        edges = {frozenset(edge) for edge in meeticmod[["eid1", "eid2"]].values}
        if HAS_NX:
            # if networkx is installed, use it
            G = nx.Graph()
            G.add_edges_from(edges)
            grps = [G.subgraph(c) for c in nx.connected_components(G)]
            grps = set(frozenset(g.nodes) for g in grps)
            return grps
        else:
            # fallback without networkx: return the raw edge pairs
            # (no merging of connected components)
            return edges
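
    # Connected-components grouping illustrated (toy input): the edges
    # {1-2, 2-3, 7-8} yield the groups {frozenset({1, 2, 3}), frozenset({7, 8})}
    # when networkx is available; the fallback returns the three edge pairs
    # unmerged.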

    @cached_property
    def _meetic(self):
        """create an associativity frame: for each gid, the pairs of
        elements (eid1, eid2) meeting at that grid point
        """
        meetic = {}
        g2s = self._gid2eids
        for gid, eids in g2s.items():
            # sort eids such that eid1 < eid2
            eids = sorted(list(eids))
            for i, pair in enumerate(combinations(eids, 2)):
                meetic[gid, i] = pair
        # --------------------------------------------------------------------
        pairs = np.array(list(meetic.values()))
        gids = np.array(list(meetic.keys()))
        gidspairs = np.hstack((gids, pairs))
        df = pd.DataFrame(
            gidspairs, columns=["gid", "pathid", "eid1", "eid2"]
        )
        # --------------------------------------------------------------------
        # collect cards data
        e2d = self._eid2data.copy()
        df = pd.merge(df, e2d, left_on="eid1", right_index=True)
        df = pd.merge(df, e2d, left_on="eid2", right_index=True, suffixes=("1", "2"))
        df["same_card"] = df.card1 == df.card2
        df["same_pid"] = df.pid1 == df.pid2
        df["same_dim"] = df.dim1 == df.dim2
        # --------------------------------------------------------------------
        # eid2pcard.
        # Merge using `how='left'` since some elements (RBEx) do not have properties
        e2pc = pd.Series(self._eid2pcard, name="pcard").to_frame()
        df = pd.merge(df, e2pc, left_on="eid1", right_index=True, how="left")
        df = pd.merge(
            df, e2pc, left_on="eid2", right_index=True, suffixes=("1", "2"), how="left"
        )
        df["same_pcard"] = df.pcard1 == df.pcard2
        # --------------------------------------------------------------------
        # calculate the angle between each pair of normals
        nn = self.normals()
        df["angle"] = angle_0_pi(
            nn.loc[df.eid1].values, nn.loc[df.eid2].values, range_0_pi=True
        )
        return df.sort_index()

    def free_edges(self, eids=None, _check_eids_as_2d=True):
        """
        Return free-edge data for the provided ``eids``. If ``eids`` is None,
        calculate the free edges of the whole model.

        The returned data consists of a tuple (``free_edges``, ``free_edges_gids``)
        where:

        * ``free_edges`` is a ``networkx.Graph`` whose edges are the free edges,
          each one carrying the generating element IDs as edge data
        * ``free_edges_gids`` is a single set of all the nodes on free edges.

        Only 2D elements are taken into account. If 0d, 1d or 3d elements are
        passed in the ``eids`` parameter, they are silently skipped.

        :param eids: element IDs
        :type eids: any iterable of integers OR ``None``

        :returns: tuple (``networkx.Graph``, ``set``)

        >>> fedges, fedges_gids = reg.mesh.free_edges()
        >>> fedges
        <networkx.classes.graph.Graph ...
        >>> fedges_gids
        {1, 2, 3, 4, ...
        >>> # providing ``eids`` restricts free edges to the provided element IDs:
        >>> fedges, gids = reg.mesh.free_edges(eids=(1,))
        >>> gids
        {13, 12, 5, 15}
        >>> fedges.edges()
        EdgeView([(12, 5), (12, 15), (5, 13), (15, 13)])
        >>> fedges.edges()[(5, 12)]
        {'eid': 1, 'eids': frozenset({1})}
        """
        if not eids:
            _eids2d = self.eids_by_dim("2d")
            eids = _eids2d
        elif _check_eids_as_2d:
            _eids2d = self.eids_by_dim("2d")
            eids = set(eids) & _eids2d

        eid2gids = self.eid2gids(eids, keep_order=True)
        gid2eids = self.gid2eids(self.eid2gids(eids, asbunch=True))
        edges = []
        _eids = {}
        _all_eids_on_fedge = {}
        for eid, gids in eid2gids.items():
            # close the node cycle: (g1, g2), (g2, g3), ..., (gn, g1)
            for g1, g2 in zip(gids, gids[1:] + [gids[0]]):
                fs = frozenset((g1, g2))
                edges.append(fs)
                # keep track of the element ID generating the current free edge;
                # since we will only keep SINGLE free edges, it's safe to store
                # the data as a dictionary
                _eids[fs] = eid
                # `eid` alone would miss a triangular element having one single
                # node on the free edge. Correct it by collecting all eids
                # attached to the free edge
                _all_eids_on_fedge[fs] = (gid2eids[g1] | gid2eids[g2]) & eids
        # --------------------------------------------------------------------
        # find single edges
        fedges = nx.Graph()
        # generator of networkx-friendly tuples (g1, g2, {'eid': eid})
        single = (
            (*k, {"eid": _eids[k], "eids": _all_eids_on_fedge[k]})
            for k, v in Counter(edges).items()
            if v == 1
        )
        fedges.add_edges_from(single)
        fedges_gids = set(fedges.nodes())
        return fedges, fedges_gids
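
    # Free-edge detection in a nutshell (toy case): each element contributes
    # its closed node cycle, e.g. gids [5, 12, 15, 13] give the edges
    # (5,12), (12,15), (15,13), (13,5). An edge shared by two elements is
    # counted twice by the Counter and discarded; edges counted exactly once
    # are free.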
1660 def to_grouped_vtk(
1661 self,
1662 filename=None,
1663 eids=None,
1664 title="",
1665 include_nastran_ids=True,
1666 exclude_cards=(),
1667 include_cards=(),
1668 # grouping options:
1669 grp_model=True,
1670 grp_cards=True,
1671 grp_properties=True,
1672 user_groups=None,
1673 ):
1674 all_groups = {}
1675 # =================================================================
1676 # Whole model
1677 # =================================================================
1678 if grp_model:
1679 grid, gids_vtk2nas, eids_vtk2nas = self.to_vtk(
1680 title="whole model",
1681 )
1682 # for lcid, df in forces.groupby(level="subcaseid"):
1683 # _df = df.loc[lcid]
1684 # _df = _df.reindex(_id_order_by_vtk2nas(gids_vtk2nas))
1685 # _forces = list(df.T.to_dict(orient="list").values())
1686 # grid.point_data[f"force-LCID{lcid}"] = _forces
1687 all_groups["whole model"] = grid
1688 # =================================================================
1689 # model cards
1690 # =================================================================
1691 if grp_cards:
1692 cards = {}
1693 for card, eids in self.card2eids().items():
1694 grid, gids_vtk2nas, eids_vtk2nas = self.to_vtk(
1695 title=f"{card}S",
1696 eids=eids,
1697 )
1698 cards[card + "S"] = grid
1699 if len(cards) > 0:
1700 all_groups["element cards"] = pv.MultiBlock(cards)
1701 # =================================================================
1702 # model Properties
1703 # =================================================================
1704 if grp_properties:
1705 _pids = defaultdict(list)
1706 pids = {}
1707 for eid, pid in self.eid2pid().items():
1708 _pids[pid].append(eid)
1709 for pid, eids in _pids.items():
1710 grid_pid, gids_vtk2nas, eids_vtk2nas = self.to_vtk(
1711 title=f"{pids}S",
1712 eids=eids,
1713 )
1714 pids[f"PID{pid}"] = grid_pid
1715 if len(pids) > 0:
1716 all_groups["element PIDs"] = pv.MultiBlock(pids)
1717 # =====================================================================
1718 # user defined groups
1719 # assuming user_groups={'<groupname>': [eids]}
1720 # =====================================================================
        if user_groups:
            groups = {}
            for groupname, group_eids in user_groups.items():
                grid, gids_vtk2nas, eids_vtk2nas = self.to_vtk(eids=group_eids)
                groups[groupname] = grid
            if len(groups) > 0:
                all_groups["user def"] = pv.MultiBlock(groups)
        # put everything in the same .vtm file
        meshes = pv.MultiBlock(all_groups)
        if filename:
            _fname = os.path.abspath(filename)
            meshes.save(_fname, binary=True)
        else:
            return meshes
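    # A minimal usage sketch (file and group names are illustrative):
    #
    #     mesh.to_grouped_vtk(
    #         filename="model.vtm",
    #         user_groups={"wing": [1, 2, 3]},
    #     )
    #     # or keep the MultiBlock in memory:
    #     blocks = mesh.to_grouped_vtk()
    #     blocks["whole model"].plot()  # requires pyvista plotting support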
1737 def to_vtk(
1738 self,
1739 filename=None,
1740 eids=None,
1741 title="",
1742 include_nastran_ids=True,
1743 exclude_cards=(),
1744 include_cards=(),
    ):
        """export a pyvista UnstructuredGrid, optionally restricted to a
        subset of elements by providing `eids`.
        """
        if not ISPYVISTA or not ISVTK:
            raise RuntimeError("pyvista and/or vtk is not installed")
1751 if not include_cards:
1752 include_cards = cards_mod.collection()
        # set difference binds tighter than union; parentheses make the intent explicit
        exclude_cards = set(exclude_cards) | (cards_mod.collection() - set(include_cards))
1754 eid2card = self._eid2card.copy()
1755 # --------------------------------------------------------------------
1756 # get elements of interest (if `eids` is None, get all elements)
1757 _eids = self.eid2gids(keep_order=False)
1758 _eids = {
1759 eid: _gids
1760 for eid, _gids in _eids.items()
1761 if eid2card[eid] not in exclude_cards
1762 }
1763 SUBMESH = False
1764 if eids:
1765 SUBMESH = True
1766 _eids = {eid: _gids for eid, _gids in _eids.items() if eid in eids}
1767 eids = np.array(list(_eids.keys()))
1768 # --------------------------------------------------------------------
1769 # get ALL gids and coordinates
1770 _gids = set()
1771 for eid, _gids_per_eid in _eids.items():
1772 _gids |= _gids_per_eid
1773 gids, coords, csys = self.coords(gids=_gids, asdf=False, incsys=0)
1774 # --------------------------------------------------------------------
1775 # iterate over each card having `to_vtk` method
1776 cardnames = self.reg.summary["element"] - exclude_cards
        cells = []
        cell_types = []
        cell_eids = []
        gids_vtk2nas = {}
        eids_vtk2nas = {}
        cards = []
        # --------------------------------------------------------------------
        # put RBEs last since they create "fake" elements
        # --------------------------------------------------------------------
        fake_elts_cards = ("RBE2", "RBE3")
        cardnames = [c for c in cardnames if c not in fake_elts_cards] + [
            c for c in fake_elts_cards if c in cardnames
        ]
        for cardname in cardnames:
            if SUBMESH:
                card = self.reg.container[BULK.title][cardname].subset(eids=eids)
            else:
                card = self.reg.container[BULK.title][cardname]
            # skip cards that cannot be exported, or that are empty once
            # restricted to the requested subset
            if not hasattr(card, "to_vtk"):
                logging.info(f"export to VTK: skip {cardname} (no `to_vtk` method)")
                continue
            if len(card) == 0:
                logging.info(f"export to VTK: skip {cardname} (empty set)")
                continue
1807 vtkdata = card.to_vtk(nasgids=gids)
1808 cells.append(vtkdata["cells"])
1809 cell_types += vtkdata["cell_types"]
1810 cell_eids += vtkdata["eids"]
1811 cards += vtkdata["card"]
1812 # --------------------------------------------------------------------
1813 # map VTK elements indices to NASTRAN element IDs
1814 eids_vtk2nas = dict(zip(range(len(cell_eids)), cell_eids))
1815 # ----------------------------------------------------------------
1816 # map VTK node indices to NASTRAN node IDs
1817 gids_vtk2nas = dict(zip(range(len(gids)), gids))
1819 cells = np.concatenate(cells)
1820 cell_types = [getattr(vtk, attr) for attr in cell_types]
1821 cell_types = np.array(cell_types)
1822 nastran_cards = np.array(cards)
1823 grid = pv.UnstructuredGrid(cells, cell_types, coords)
1824 if include_nastran_ids:
1825 grid["NASgid"] = [gids_vtk2nas[i] for i in range(grid.n_points)]
1826 grid["NAScard"] = [nastran_cards[i] for i in range(grid.n_cells)]
1827 grid["NASeid"] = [eids_vtk2nas[i] for i in range(grid.n_cells)]
1828 grid["PID"] = [self._eid2pid[eid] for eid in grid["NASeid"]]
        if filename:
            fname = os.path.abspath(os.path.splitext(filename)[0] + ".vtu")
            grid.save(fname)
            logging.info("saved %s", fname)
            return
        return grid, gids_vtk2nas, eids_vtk2nas
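    # A minimal usage sketch; the returned mappings translate VTK 0-based
    # indices back to NASTRAN IDs:
    #
    #     grid, gids_vtk2nas, eids_vtk2nas = mesh.to_vtk(eids={1, 2, 3})
    #     first_cell_eid = eids_vtk2nas[0]  # NASTRAN EID of VTK cell #0
    #     grid.save("subset.vtu")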
1845 def to_gmsh(
1846 self,
1847 filename=None,
1848 entities_field="pid",
1849 eid2data=None,
1850 gid2data=None,
1851 lcids=None,
1852 ):
1853 """dumps registry to GMSH mesh file"""
1854 # =====================================================================
1855 # build eids_data
1856 # =====================================================================
1857 eids_data = self.eid2data().copy()
1858 eids_data["_dimint_"] = eids_data["dim"].str.slice(0, 1).astype(int)
1859 eids_data = eids_data[
1860 ["_dimint_", "pid", "card", "const"]
1861 ].dropna() # discard CONM2, RBE2, etc...
1862 # cardname 2 gmsh element type
1863 c2t = pd.Series(
1864 {
1865 cname: getattr(self.reg.bulk[cname], "gmsh_eltype", None)
1866 for cname in self.reg.bulk
1867 },
1868 name="gmsh_eltype",
1869 )
1870 eids_data = eids_data.join(c2t, on="card", how="left")
1871 eids_data["gmsh_eltype"] = eids_data["gmsh_eltype"].astype(int)
1872 eids_data = eids_data.fillna("None")
        # ---------------------------------------------------------------------
        # optionally merge with additional eids_data;
        # eid2data is a dict of dicts: {'newfield': {<eid>: <value>}}
        eids_data = eids_data.merge(
            pd.DataFrame(eid2data), left_index=True, right_index=True, how="left"
        )
1879 eids_data = eids_data.rename(
1880 columns={entities_field: "_entity_", "gmsh_eltype": "_gmsh_eltype_"}
1881 )
1882 # ---------------------------------------------------------------------
1883 # prepare eids2entity and entities2physicalnames
1884 colnames = [c for c in eids_data.columns if not c.startswith("_")]
1885 for colname in eids_data.columns:
1886 if colname in ("_dimint_", "_gmsh_eltype_"):
1887 continue
1888 _colname = colname
1889 if colname == "_entity_":
1890 _colname = entities_field
1891 eids_data.loc[:, colname] = f"{_colname}{GMSH_STR_SEP}" + eids_data[
1892 colname
1893 ].astype(str)
1895 entities2physicalnames = eids_data.reset_index().groupby(
1896 ["_dimint_", "_gmsh_eltype_", "_entity_"]
1897 )
1898 entities2physicalnames = entities2physicalnames.agg(
1899 dict(zip(colnames, [lambda x: set(x.tolist())] * len(colnames)))
1900 ).stack()
1901 entities2physicalnames.name = "_physicalnames_"
1902 entities2physicalnames = (
1903 entities2physicalnames.reset_index()
1904 .drop(columns=["level_3"])
1905 .set_index("_entity_")
1906 )
1907 eids2entity = eids_data[["_dimint_", "_gmsh_eltype_", "_entity_"]]
1908 # =====================================================================
1909 # build gids_data.
        # no shared _entity_ for them: one entity per GID
1911 # =====================================================================
1912 gids_data = self.coords(asdf=True)[["X", "Y", "Z"]]
1913 gids_data = gids_data.join(self.boundaries["dof"].astype(str))
1914 if gid2data is not None:
1915 gids_data = gids_data.merge(
1916 gid2data, left_index=True, right_index=True, how="left"
1917 )
1918 gids_data["_entity_"] = f"GID{GMSH_STR_SEP}" + gids_data.index.astype(str)
1919 for colname in gids_data.columns:
1920 if colname in ("X", "Y", "Z", "_entity_"):
1921 continue
1922 gids_data.loc[:, colname] = f"{colname}{GMSH_STR_SEP}" + gids_data[
1923 colname
1924 ].astype(str)
1925 gids_data["_dimint_"] = 0
1926 gids_data["_gmsh_eltype_"] = -1 # avoid NaN to keep column as integer
1927 colnames = [
1928 c for c in gids_data.columns if not c.startswith("_") and c not in "XYZ"
1929 ]
1930 gids2entity = gids_data[["_dimint_", "_gmsh_eltype_", "_entity_"]]
1931 _df = gids_data.reset_index().set_index(
1932 ["_dimint_", "_gmsh_eltype_", "_entity_"]
1933 )[colnames]
1934 _df = (
1935 _df.groupby(["_dimint_", "_gmsh_eltype_", "_entity_"])
1936 .agg(dict(zip(colnames, [lambda x: set(x.tolist())] * len(colnames))))
1937 .stack()
1938 )
1939 _df.name = "_physicalnames_"
1940 _df = _df.reset_index().drop(columns=["level_3"]).set_index("_entity_")
1941 entities2physicalnames = pd.concat((entities2physicalnames, _df))
1942 # =====================================================================
1943 # save _params
1944 # =====================================================================
1945 self._params = {
1946 "entities2physicalnames": entities2physicalnames,
1947 "gids2entity": gids2entity,
1948 "eids2entity": eids2entity,
1949 "eids_data": eids_data,
1950 "gids_data": gids_data,
1951 }
1952 lines = ["$MeshFormat", "4.1 0 8", "$EndMeshFormat"]
1953 # self._to_gmsh_prepro()
1954 lines += self._to_gmsh_physical_names()
1955 lines += self._to_gmsh_entities()
1956 lines += self._to_gmsh_nodes()
1957 lines += self._to_gmsh_elements()
        if lcids:
            if lcids is True:
                raise NotImplementedError(
                    "lcids=True (all subcases) is not implemented; "
                    "pass explicit subcase IDs"
                )
            if isinstance(lcids, int):
                lcids = (lcids,)
            for lcid in lcids:
                lines += self._to_gmsh_loading(lcid)
1965 txt = "\n".join(lines)
        if filename:
            with open(filename, "w") as fh:
                fh.write(txt)
            return filename
        return txt
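    # A minimal usage sketch (the extra field name "margin" and its values
    # are illustrative):
    #
    #     mesh.to_gmsh(
    #         filename="model.msh",
    #         entities_field="pid",
    #         eid2data={"margin": {1: 0.15, 2: 0.32}},
    #         lcids=(100,),
    #     )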
1973 def _to_gmsh_physical_names(self):
1974 _physicalnames = {}
1976 lines = ["$PhysicalNames"]
1977 # =====================================================================
1978 # elements physical names
1979 # =====================================================================
1980 __physicalnames = set()
1981 for _entity_, _df in self._params["entities2physicalnames"].iterrows():
1982 __physicalnames |= {
1983 (_df["_dimint_"], _entity_),
1984 (_df["_dimint_"], next(iter(_df["_physicalnames_"]))),
1985 }
1986 ptag = 1
1987 null = f"{GMSH_STR_SEP}nan"
1988 for dimint, name in __physicalnames:
1989 if name.endswith(null):
1990 continue
1991 lines.append(f'{dimint} {ptag} "{name}"')
1992 _physicalnames[(dimint, name)] = ptag
1993 ptag += 1
1994 lines.insert(1, str(len(lines) - 1))
1995 lines.append("$EndPhysicalNames")
1996 self._params["physicalnames"] = _physicalnames
1997 return lines
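    # The emitted section follows the MSH 4.1 $PhysicalNames layout; a sketch
    # with illustrative values:
    #
    #     $PhysicalNames
    #     2
    #     0 1 "GID#12516"
    #     2 2 "pid#100"
    #     $EndPhysicalNames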
1999 def _to_gmsh_entities(self):
2000 dim2count = {0: 0, 1: 0, 2: 0, 3: 0}
2001 _entities = {}
2002 entity_tag = 1
2003 tail = f"{GMSH_STR_SEP}nan"
2004 physicalnames = (
2005 self._params["entities2physicalnames"]
2006 .reset_index()
2007 .set_index(["_dimint_", "_entity_"])
2008 ).sort_index()
2009 pn2 = self._params["physicalnames"]
2010 # ---------------------------------------------------------------------
2011 # nodes entities
2012 gids_data = self._params["gids_data"]
        for gid, row in gids_data.iterrows():
            _entity_ = row["_entity_"]
            also = list(physicalnames["_physicalnames_"].loc[0, _entity_])
            grp_ids = [next(iter(i)) for i in also]
            grp_ids = [i for i in grp_ids if not i.endswith(tail)]
            if grp_ids:
                grp_ids = [str(pn2[(0, grpid)]) for grpid in grp_ids]
            dim2count[0] += 1
2028 _entities[(0, f"GID{GMSH_STR_SEP}{gid}")] = {
2029 "entity_tag": entity_tag,
2030 "bbox": row[["X", "Y", "Z"]].tolist(),
2031 "gid": gid,
2032 "physical_tags": grp_ids,
2033 }
2034 entity_tag += 1
2035 # ---------------------------------------------------------------------
2036 # elements entities
2037 eids_data = self._params["eids_data"].reset_index()
2039 for (dim, entity_value), df in eids_data.groupby(["_dimint_", "_entity_"]):
2040 if entity_value.endswith(tail):
2041 continue
2042 dim2count[dim] += 1
2043 eids = set(df.eid)
2044 gids = self.eid2gids(eids=eids, asbunch=True)
2045 xyz = self.coords(asdf=True).loc[list(gids)][["X", "Y", "Z"]]
2046 bbox = xyz.min().tolist() + xyz.max().tolist()
            grp_ids = [entity_value]
            also = list(physicalnames["_physicalnames_"].loc[(dim, entity_value)])
            grp_ids += [next(iter(i)) for i in also]
            if grp_ids:
                grp_ids = [str(pn2[(dim, grpid)]) for grpid in grp_ids]
2055 _entities[(dim, entity_value)] = {
2056 "entity_tag": entity_tag,
2057 "bbox": bbox,
2058 "gids": gids,
2059 "eids": eids,
2060 "physical_tags": grp_ids,
2061 }
2062 entity_tag += 1
2063 # =====================================================================
2064 # dumping entities
2065 # =====================================================================
2066 lines = ["$Entities", " ".join(map(str, dim2count.values()))]
2067 for (dim, entity_value), entity_data in _entities.items():
2068 physical_tags = entity_data["physical_tags"]
2069 row = (
2070 [entity_data["entity_tag"]]
2071 + entity_data["bbox"]
2072 + [len(physical_tags)]
2073 + physical_tags
2074 + [0]
2075 )
2077 line = " ".join(map(str, row))
2078 lines.append(line)
2079 lines.append("$EndEntities")
2080 self._params["entities"] = _entities
2081 return lines
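    # Rows are assembled as: entity tag, bounding box (X Y Z for points,
    # min/max corners otherwise), number of physical tags, the tags, and a
    # trailing 0 (no bounding entities); a sketch with illustrative values:
    #
    #     $Entities
    #     2 0 1 0
    #     1 372.45 -42.28 0.0 1 3 0
    #     $EndEntities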
2083 def _to_gmsh_nodes(self):
2084 allgrids = self._params["gids_data"][["X", "Y", "Z"]]
2085 numEntityBlocks = 0
2086 numNodes = len(allgrids)
2087 minNodeTag = min(allgrids.index)
2088 maxNodeTag = max(allgrids.index)
2089 # iterate over entity_ids
2090 gid2entity_name = self._params["gids_data"]["_entity_"].to_dict()
2091 entities_data = self._params["entities"]
2092 dim = 0
2093 lines = ["$Nodes"]
2094 dummy_0ds = []
2095 for gid, _entity_ in gid2entity_name.items():
2096 entity_data = entities_data[(dim, _entity_)]
2097 xyz = entity_data["bbox"]
2098 lines.append(f"{dim} {entity_data['entity_tag']} 0 1\n{gid}")
2099 lines.append(" ".join(list(map(str, xyz))))
2100 numEntityBlocks += 1
2101 dummy_0ds.append({"entity_tag": entity_data["entity_tag"], "gid": gid})
2102 # ---------------------------------------------------------------------
2103 # dummy 0-D element at node's place
2104 lines.insert(1, f"{numEntityBlocks} {numNodes} {minNodeTag} {maxNodeTag}")
2105 lines.append("$EndNodes")
2106 self._params["dummy_0ds"] = dummy_0ds
2107 return lines
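    # One single-node block is written per GRID; a sketch with illustrative
    # values:
    #
    #     $Nodes
    #     4393 4393 1 12517    <- numEntityBlocks numNodes minNodeTag maxNodeTag
    #     0 1 0 1              <- entityDim entityTag parametric numNodesInBlock
    #     12516
    #     372.45 -42.28 0.0
    #     $EndNodes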
2109 def _to_gmsh_elements(self):
2110 eids_data = self._params["eids_data"]
2111 numEntityBlocks = 0
2112 numElements = len(eids_data) + len(self._params["dummy_0ds"])
2113 minElementTag = min(eids_data.index)
2114 maxElementTag = max(eids_data.index) + len(self._params["dummy_0ds"])
2115 entities_data = self._params["entities"]
2116 out = BytesIO()
2117 # iterate over entity_ids
2118 tail = f"{GMSH_STR_SEP}nan"
2119 for (dimint, eltype, entity_value, card), df in eids_data.groupby(
2120 ["_dimint_", "_gmsh_eltype_", "_entity_", "card"]
2121 ):
2122 if entity_value.endswith(tail):
2123 continue
2124 entity_data = entities_data[(dimint, entity_value)]
2125 out.write(
2126 f"{dimint} {entity_data['entity_tag']} {eltype} {len(df)}\n".encode()
2127 )
            # strip the "card#" prefix added when tagging eids_data columns
            card = card[5:]
2129 _data = pd.DataFrame(self.reg.bulk[card].array)
2130 nodes = _data.set_index("EID")[self.reg.bulk[card].gids_header]
2131 nodes = nodes[nodes.index.isin(df.index)].reset_index()
2132 np.savetxt(out, nodes, fmt="%d")
2133 numEntityBlocks += 1
2134 # also dummy 0d elements
2135 startat = max(eids_data.index) + 1
2136 for new_eid, dummy in enumerate(self._params["dummy_0ds"], start=startat):
2137 out.write(
2138 f"0 {dummy['entity_tag']} 15 1\n{new_eid} {dummy['gid']}\n".encode()
2139 )
2140 numEntityBlocks += 1
2142 lines = ["$Elements"]
2143 lines.append(f"{numEntityBlocks} {numElements} {minElementTag} {maxElementTag}")
2144 lines += out.getvalue().decode().strip("\n").split("\n")
2145 lines.append("$EndElements")
2146 return lines
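    # Cells are written per (dim, gmsh element type, entity) block, followed by
    # one dummy 0-D element (type 15) per node entity; a sketch with
    # illustrative values:
    #
    #     $Elements
    #     3 120 1 121          <- numEntityBlocks numElements minTag maxTag
    #     2 5 3 2              <- entityDim entityTag elementType numElementsInBlock
    #     1 101 102 103 104    <- EID followed by its node IDs
    #     $EndElements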
2148 def _to_gmsh_loading(self, lcid):
2149 lcid_title = (
2150 f"{lcid}:" + self.reg.container["cases"][f"SUBCASE {lcid}"]["SUBTITLE"]
2151 )
        df_forces = (
            pd.DataFrame(self.reg.container["bulk"]["FORCE"].array)
            .set_index(["SID", "G"])[["N1", "N2", "N3"]]
            .reset_index("G")
        )
        # restrict the output to the requested subcase
        df_forces = df_forces.loc[[lcid]]
        lines = [
            "$NodeData",
            1,
            f'"{lcid_title}"',
            1,
            0.0,
            3,
            0,
            3,
            f"{len(df_forces)}",
        ]
        data = df_forces.to_csv(sep=" ", header=False, index=False)
        lines += data.strip("\n").split("\n")
        lines.append("$EndNodeData")
        return list(map(str, lines))
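    # The emitted block follows the MSH $NodeData layout: one string tag
    # (title), one real tag (time), three integer tags (time step, number of
    # components, number of records); a sketch with illustrative values:
    #
    #     $NodeData
    #     1
    #     "100:ULTIMATE PULL-UP"
    #     1
    #     0.0
    #     3
    #     0
    #     3
    #     2
    #     12516 0.0 -1200.0 0.0
    #     12517 0.0 -1200.0 0.0
    #     $EndNodeData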
2173 def expand(
2174 self,
2175 eids,
2176 by_cards=None,
2177 by_dims=None,
2178 by_shapes=None,
2179 by_eids=None,
2180 new_only=False,
2181 ):
2182 """
2183 Given a set of elements ``eids``, increase this set by surrounding
2184 elements, eventually restricted by cards, dim or shape.
2186 None or Only one of ``by_card``, ``by_dim`` or ``by_shape`` parameter
2187 shall be passed.
2189 :param eids: set of element to expand.
2190 :type eids: sequence of integers
2192 :param by_cards: *optional* sequence of cards to search for
2193 :type by_cards: sequence of strings
2195 :param by_dims: *optional* sequence of dims to search for
2196 :type by_dims: sequence of strings
2198 :param by_shapes: *optional* sequence of shapes to search for
2199 :type by_shapes: sequence of strings
2201 :param by_eids: *optional* sequence of eids to search for
2202 :type by_eids: sequence of integers
2204 :returns: set of elements eids
2206 >>> # expand whatever the connected elements:
2207 >>> reg.mesh.expand(eids=(1,2)) == frozenset({1, 2, 3, 11139, 4, 5, 7, 8, 9, 6})
2208 True
2209 >>> # expand, but only on CBUSH:
2210 >>> reg.mesh.expand(eids=(1,2), by_cards=('CBUSH',))
2211 frozenset({1, 2, 11139})
2212 >>> # expand, but only on 1D elements
2213 >>> reg.mesh.expand(eids=(1,2), by_dims=('1d',))
2214 frozenset({1, 2, 11139})
2215 >>> reg.mesh.expand(eids=(1,2), by_shapes=('line',))
2216 frozenset({1, 2, 11139})
        >>> # expand, but only using the elements provided;
        >>> # this is useful to prepare a more complex expansion,
        >>> # for example expanding using only small 1D elements:
2220 >>> df = reg.mesh.eid2data()
2221 >>> small_1d = df[(df['length'] <= 0.001) & df['card'].isin(('CBUSH', 'RBE2'))]
2222 >>> reg.mesh.expand(eids=(1,2), by_eids=small_1d.index.tolist())
2223 frozenset({1, 2})
2224 >>> reg.mesh.expand(eids=(1,2), by_eids=[], new_only=True)
2225 frozenset()
2226 """
2227 eids = set(eids)
2228 gids = self.eid2gids(eids=eids, asbunch=True)
2229 # get elements using those nodes
2230 expanded_eids = self.gid2eids(gids=gids, asbunch=True)
2231 expanded_eids -= eids # keep only new elements
2232 # --------------------------------------------------------------------
2233 # excluding by criteria
2234 if by_cards:
2235 expanded_eids -= {
2236 eid
2237 for eid, card in self.eid2card(expanded_eids).items()
2238 if card not in by_cards
2239 }
        elif by_dims:
            expanded_eids -= {
                eid
                for eid, dim in self.eid2dim(expanded_eids).items()
                if dim not in by_dims
            }
        elif by_shapes:
            expanded_eids -= {
                eid
                for eid, shape in self.eid2shape(expanded_eids).items()
                if shape not in by_shapes
            }
2252 elif by_eids is not None:
2253 expanded_eids &= set(by_eids)
2254 if new_only:
2255 return frozenset(expanded_eids)
2256 return frozenset(eids | expanded_eids)
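    # A sketch of a common pattern (``n_rings`` is illustrative): grow the set
    # by N rings of neighbours by iterating:
    #
    #     eids = {1, 2}
    #     for _ in range(n_rings):
    #         eids = reg.mesh.expand(eids=eids)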
2258 # ========================================================================
2259 # constraints & loading
2260 # ========================================================================
2262 @cached_property
2263 def load_combination(self):
2264 """return LOAD cards as DataFrame"""
2265 cards = self.reg.container["bulk"]["LOAD"]
2266 if not cards:
2267 return
2268 loads = pd.DataFrame(cards.array)
2269 gridsets = cards.carddata["load_factors"]
2270 df = pd.concat({i: pd.DataFrame(d) for i, d in enumerate(gridsets)})
2271 df.index.names = ["load_factorsID", "#"]
2272 df = df.rename(columns={"S": "Si", "L": "Li"})
2273 df = df.reset_index()
2274 merged = loads.merge(
2275 df, how="outer", left_on="load_factorsID", right_on="load_factorsID"
2276 )
2277 merged = merged.drop(columns=["load_factorsID", "#"])
2278 return merged.set_index("SID")
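    # A sketch of the returned layout (values illustrative): one row per
    # (scale factor, load set) pair, indexed by the combination SID:
    #
    #            S   Si   Li
    #     SID
    #     10   1.0  1.0  100
    #     10   1.0  2.5  200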
2280 @cached_property
2281 def boundaries(self):
2282 """return dictionnary mapping nodes to constraint DOF
2283 >>> reg.mesh.boundaries()
2284 dof sid source
2285 gid
2286 12508 123 290 SPC1
2287 12509 123 290 SPC1
2288 12510 123 290 SPC1
2289 12511 123 290 SPC1
2290 12516 123456 -1 GRID
2291 12517 123456 -1 GRID
2292 """
2293 # --------------------------------------------------------------------
2294 # getting SPC1
2295 spc1 = pd.DataFrame(self.reg.container["bulk"]["SPC1"].array)
2296 # SID C spc1_gridsetID
2297 # 0 290 123 0
2298 # 1 290 123 1
2299 # 2 290 123 2
2300 # 3 290 123 3
2301 gridsets = self.reg.container["bulk"]["SPC1"].carddata["spc1_gridset"]
2302 df = pd.concat({i: pd.DataFrame(d) for i, d in enumerate(gridsets)})
2303 df.index.names = ["spc1_gridsetID", "#"]
2304 df.reset_index(level=-1, inplace=True)
2305 df = spc1.merge(df, right_index=True, left_on="spc1_gridsetID")[
2306 ["SID", "C", "G"]
2307 ]
2308 df = df.reset_index(drop=True)
2309 df["source"] = "SPC1"
2310 # --------------------------------------------------------------------
        # getting nodes with permanent single-point constraints (GRID "PS" field)
2312 nodes = pd.DataFrame(self.reg.container["bulk"]["GRID"].array)
2313 nodes = nodes.dropna(subset=["PS"])[["ID", "PS"]]
2314 nodes.columns = ["G", "C"]
2315 nodes["source"] = "GRID"
2316 # ID CP X1 X2 X3 CD PS SEID
2317 # 4392 12516 0 372.45 -42.28 0.0 0 123456.0 NaN
2318 # 4393 12517 0 363.95 -42.28 0.0 0 123456.0 NaN
        df = pd.concat((df, nodes), axis=0)
        df.C = df.C.astype(int)
        df = df[["C", "G", "SID", "source"]]  # ensure column order
        df.columns = ["dof", "gid", "sid", "source"]
        # SID may be NaN when the source is "GRID": default it to -1
        df.sid = df.sid.fillna(-1).astype(int)
2327 # --------------------------------------------------------------------
2328 return df.reset_index(drop=True).set_index("gid")
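    # A minimal usage sketch (the DOF filter value is illustrative):
    #
    #     bounds = reg.mesh.boundaries
    #     clamped_gids = bounds[bounds["dof"] == 123456].index.tolist()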
2331if __name__ == "__main__":
2332 import doctest
2334 doctest.testmod(optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)