diff --git a/.github/workflows/ci_cd.yml b/.github/workflows/ci_cd.yml index 5cab9f209e..326fc6668e 100644 --- a/.github/workflows/ci_cd.yml +++ b/.github/workflows/ci_cd.yml @@ -15,7 +15,7 @@ on: env: DOCKER_IMAGE_NAME: ghcr.io/ansys/prime - DOCKER_IMAGE_TAG: '24.2.0' + DOCKER_IMAGE_TAG: '25.1.0.dev14' MAIN_PYTHON_VERSION: '3.10' PACKAGE_NAME: 'ansys-meshing-prime' PACKAGE_NAMESPACE: 'ansys.meshing.prime' @@ -117,7 +117,7 @@ jobs: PYPRIMEMESH_LAUNCH_CONTAINER: 1 PYPRIMEMESH_SPHINX_BUILD: 1 PYPRIMEMESH_IMAGE_TAG: ${{ env.DOCKER_IMAGE_TAG }} - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER )}} + ANSYSLMD_LICENSE_FILE: '1055@${{ secrets.LICENSE_SERVER }}' testing: name: Run Unit Tests @@ -147,7 +147,7 @@ jobs: env: PYPRIMEMESH_LAUNCH_CONTAINER: 1 PYPRIMEMESH_IMAGE_TAG: ${{ env.DOCKER_IMAGE_TAG }} - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER )}} + ANSYSLMD_LICENSE_FILE: '1055@${{ secrets.LICENSE_SERVER }}' with: python-version: ${{ env.MAIN_PYTHON_VERSION }} diff --git a/doc/changelog.d/882.maintenance.md b/doc/changelog.d/882.maintenance.md new file mode 100644 index 0000000000..224bfde0fd --- /dev/null +++ b/doc/changelog.d/882.maintenance.md @@ -0,0 +1 @@ +MAINT: update code from ADO 0.7.0.dev14 \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index bde0bd5cf7..cc51110516 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "flit_core.buildapi" [project] name = "ansys-meshing-prime" -version = "0.6.1" +version = "0.7.0.dev14" description = "PyPrimeMesh is a Python client to Ansys Prime Server, which delivers core Ansys meshing technology." readme = "README.md" requires-python = ">=3.10,<4" diff --git a/src/ansys/meshing/prime/autogen/commontypes.py b/src/ansys/meshing/prime/autogen/commontypes.py index e3cc1c930b..79868ec900 100644 --- a/src/ansys/meshing/prime/autogen/commontypes.py +++ b/src/ansys/meshing/prime/autogen/commontypes.py @@ -34,8 +34,14 @@ class FaceQualityMeasure(enum.IntEnum): """ SKEWNESS = 0 """The Skewness metric ranges between 0 (worst) and 1 (best). A value of 0 indicates an equilateral cell (best) and a value of 1 indicates a completely degenerate cell (worst).""" + SIZECHANGE = 2 + """Size Change is the maximum ratio of the area of each neighboring face element to the area of face element when the area of the face element is smaller than the neighbor. The minimum value for size change is 1.""" ASPECTRATIO = 5 """The Aspect Ratio metric is greater than 1. A value of 1 indicates an equilateral cell (best) and a value of 20(e.g) indicates a stretched cell (worst).""" + WARP = 7 + """Face quality metric to check warping factor. + + **This is a beta parameter**. **The behavior and name may change in the future**.""" ELEMENTQUALITY = 50 """The Element Quality metric ranges between 0 (worst) and 1 (best).
A value of 1 indicates a perfect cube or square (best) while a value of 0 indicates that the element has a zero or negative volume (worst).""" diff --git a/src/ansys/meshing/prime/autogen/connect.py b/src/ansys/meshing/prime/autogen/connect.py index 4615036824..a6c65969be 100644 --- a/src/ansys/meshing/prime/autogen/connect.py +++ b/src/ansys/meshing/prime/autogen/connect.py @@ -174,7 +174,7 @@ def subtract_volumes(self, part_id : int, target_volumes : Iterable[int], cutter return SubtractVolumesResults(model = self._model, json_data = result) def stitch_face_zonelets(self, part_id : int, face_zonelet_ids : Iterable[int], with_face_zonelet_ids : Iterable[int], params : StitchParams) -> ConnectResults: - """ Stitches a set of face zonelets with another set of face zones. + """ Stitches a set of face zonelets with another set of face zonelets. Parameters diff --git a/src/ansys/meshing/prime/autogen/connectstructs.py b/src/ansys/meshing/prime/autogen/connectstructs.py index ba7ad7101b..b62595fa5e 100644 --- a/src/ansys/meshing/prime/autogen/connectstructs.py +++ b/src/ansys/meshing/prime/autogen/connectstructs.py @@ -1729,11 +1729,11 @@ class FuseParams(CoreObject): model: Model Model to create a ``FuseParams`` object with default parameters. use_absolute_tolerance: bool, optional - When true, gap tolerance and side tolerance provided are absolute values. + Option to use absolute tolerance value. The default value is false. When use absolute tolerance is true, gap tolerance and side tolerance provided are absolute values. **This is a beta parameter**. **The behavior and name may change in the future**. gap_tolerance: float, optional - Gap tolerance between faces to be fused. + Gap tolerance between faces to be fused. The default value is 0.05. **This is a beta parameter**. **The behavior and name may change in the future**. side_tolerance: float, optional @@ -1741,39 +1741,39 @@ class FuseParams(CoreObject): **This is a beta parameter**. **The behavior and name may change in the future**. check_interior: bool, optional - When true, checks all nodes including boundary edge nodes and nodes inside the faces. + Option to check the interior nodes. The default value is false. When check interior is true, checks all nodes including boundary edge nodes and nodes inside the faces. **This is a beta parameter**. **The behavior and name may change in the future**. fuse_option: FuseOption, optional - Option for treatment of fused surfaces. + Option for treatment of fused surfaces. The default value is None. **This is a beta parameter**. **The behavior and name may change in the future**. check_orientation: bool, optional - Option to check face normal orientation during fuse operation. + Option to check the face normal orientation during fuse operation. The default value is true. When check orientation is true, face normal orientation is checked during fuse operation. **This is a beta parameter**. **The behavior and name may change in the future**. dump_mesh: bool, optional - Option to dump mesh for debugging. + Option to save mesh for debugging. The default value is false. When dump mesh is true, mesh is saved for debugging. **This is a beta parameter**. **The behavior and name may change in the future**. local_remesh: bool, optional - Local remesh of region to be fused. + Option to remesh the fused region. The default value is true. When local remesh is true, local remeshing of fuse region is performed. **This is a beta parameter**. **The behavior and name may change in the future**. 
n_layers: int, optional - Face layers around region to be fused. + Face layers around region to be fused. The default value is 2. **This is a beta parameter**. **The behavior and name may change in the future**. separate: bool, optional - Separate region to be fused. + Option to separate fused regions. The default value is false. When separate is true, the fuse regions are separated into different zonelets. **This is a beta parameter**. **The behavior and name may change in the future**. angle: float, optional - Faces zonelets with angle less than the provided value are considered for fuse operation. + Faces zonelets with angle less than the provided value are considered for fuse operation. Default value is 45 degrees. **This is a beta parameter**. **The behavior and name may change in the future**. fuse_edges_only: bool, optional - Fuse edges only. + Option to fuse edges. The default value is false. When fuse edges only is true, only edges are fused. **This is a beta parameter**. **The behavior and name may change in the future**. json_data: dict, optional @@ -1836,11 +1836,11 @@ def __init__( model: Model Model to create a ``FuseParams`` object with default parameters. use_absolute_tolerance: bool, optional - When true, gap tolerance and side tolerance provided are absolute values. + Option to use absolute tolerance value. The default value is false. When use absolute tolerance is true, gap tolerance and side tolerance provided are absolute values. **This is a beta parameter**. **The behavior and name may change in the future**. gap_tolerance: float, optional - Gap tolerance between faces to be fused. + Gap tolerance between faces to be fused. The default value is 0.05. **This is a beta parameter**. **The behavior and name may change in the future**. side_tolerance: float, optional @@ -1848,39 +1848,39 @@ def __init__( **This is a beta parameter**. **The behavior and name may change in the future**. check_interior: bool, optional - When true, checks all nodes including boundary edge nodes and nodes inside the faces. + Option to check the interior nodes. The default value is false. When check interior is true, checks all nodes including boundary edge nodes and nodes inside the faces. **This is a beta parameter**. **The behavior and name may change in the future**. fuse_option: FuseOption, optional - Option for treatment of fused surfaces. + Option for treatment of fused surfaces. The default value is None. **This is a beta parameter**. **The behavior and name may change in the future**. check_orientation: bool, optional - Option to check face normal orientation during fuse operation. + Option to check the face normal orientation during fuse operation. The default value is true. When check orientation is true, face normal orientation is checked during fuse operation. **This is a beta parameter**. **The behavior and name may change in the future**. dump_mesh: bool, optional - Option to dump mesh for debugging. + Option to save mesh for debugging. The default value is false. When dump mesh is true, mesh is saved for debugging. **This is a beta parameter**. **The behavior and name may change in the future**. local_remesh: bool, optional - Local remesh of region to be fused. + Option to remesh the fused region. The default value is true. When local remesh is true, local remeshing of fuse region is performed. **This is a beta parameter**. **The behavior and name may change in the future**. n_layers: int, optional - Face layers around region to be fused. + Face layers around region to be fused. 
The default value is 2. **This is a beta parameter**. **The behavior and name may change in the future**. separate: bool, optional - Separate region to be fused. + Option to separate fused regions. The default value is false. When separate is true, the fuse regions are separated into different zonelets. **This is a beta parameter**. **The behavior and name may change in the future**. angle: float, optional - Faces zonelets with angle less than the provided value are considered for fuse operation. + Faces zonelets with angle less than the provided value are considered for fuse operation. Default value is 45 degrees. **This is a beta parameter**. **The behavior and name may change in the future**. fuse_edges_only: bool, optional - Fuse edges only. + Option to fuse edges. The default value is false. When fuse edges only is true, only edges are fused. **This is a beta parameter**. **The behavior and name may change in the future**. json_data: dict, optional @@ -1965,29 +1965,29 @@ def set_default( Parameters ---------- use_absolute_tolerance: bool, optional - When true, gap tolerance and side tolerance provided are absolute values. + Option to use absolute tolerance value. The default value is false. When use absolute tolerance is true, gap tolerance and side tolerance provided are absolute values. gap_tolerance: float, optional - Gap tolerance between faces to be fused. + Gap tolerance between faces to be fused. The default value is 0.05. side_tolerance: float, optional Side tolerance for fusing to the side edges. check_interior: bool, optional - When true, checks all nodes including boundary edge nodes and nodes inside the faces. + Option to check the interior nodes. The default value is false. When check interior is true, checks all nodes including boundary edge nodes and nodes inside the faces. fuse_option: FuseOption, optional - Option for treatment of fused surfaces. + Option for treatment of fused surfaces. The default value is None. check_orientation: bool, optional - Option to check face normal orientation during fuse operation. + Option to check the face normal orientation during fuse operation. The default value is true. When check orientation is true, face normal orientation is checked during fuse operation. dump_mesh: bool, optional - Option to dump mesh for debugging. + Option to save mesh for debugging. The default value is false. When dump mesh is true, mesh is saved for debugging. local_remesh: bool, optional - Local remesh of region to be fused. + Option to remesh the fused region. The default value is true. When local remesh is true, local remeshing of fuse region is performed. n_layers: int, optional - Face layers around region to be fused. + Face layers around region to be fused. The default value is 2. separate: bool, optional - Separate region to be fused. + Option to separate fused regions. The default value is false. When separate is true, the fuse regions are separated into different zonelets. angle: float, optional - Faces zonelets with angle less than the provided value are considered for fuse operation. + Faces zonelets with angle less than the provided value are considered for fuse operation. Default value is 45 degrees. fuse_edges_only: bool, optional - Fuse edges only. + Option to fuse edges. The default value is false. When fuse edges only is true, only edges are fused. 
""" args = locals() [FuseParams._default_params.update({ key: value }) for key, value in args.items() if value is not None] @@ -2040,7 +2040,7 @@ def __str__(self) -> str: @property def use_absolute_tolerance(self) -> bool: - """When true, gap tolerance and side tolerance provided are absolute values. + """Option to use absolute tolerance value. The default value is false. When use absolute tolerance is true, gap tolerance and side tolerance provided are absolute values. **This is a beta parameter**. **The behavior and name may change in the future**. """ @@ -2052,7 +2052,7 @@ def use_absolute_tolerance(self, value: bool): @property def gap_tolerance(self) -> float: - """Gap tolerance between faces to be fused. + """Gap tolerance between faces to be fused. The default value is 0.05. **This is a beta parameter**. **The behavior and name may change in the future**. """ @@ -2076,7 +2076,7 @@ def side_tolerance(self, value: float): @property def check_interior(self) -> bool: - """When true, checks all nodes including boundary edge nodes and nodes inside the faces. + """Option to check the interior nodes. The default value is false. When check interior is true, checks all nodes including boundary edge nodes and nodes inside the faces. **This is a beta parameter**. **The behavior and name may change in the future**. """ @@ -2088,7 +2088,7 @@ def check_interior(self, value: bool): @property def fuse_option(self) -> FuseOption: - """Option for treatment of fused surfaces. + """Option for treatment of fused surfaces. The default value is None. **This is a beta parameter**. **The behavior and name may change in the future**. """ @@ -2100,7 +2100,7 @@ def fuse_option(self, value: FuseOption): @property def check_orientation(self) -> bool: - """Option to check face normal orientation during fuse operation. + """Option to check the face normal orientation during fuse operation. The default value is true. When check orientation is true, face normal orientation is checked during fuse operation. **This is a beta parameter**. **The behavior and name may change in the future**. """ @@ -2112,7 +2112,7 @@ def check_orientation(self, value: bool): @property def dump_mesh(self) -> bool: - """Option to dump mesh for debugging. + """Option to save mesh for debugging. The default value is false. When dump mesh is true, mesh is saved for debugging. **This is a beta parameter**. **The behavior and name may change in the future**. """ @@ -2124,7 +2124,7 @@ def dump_mesh(self, value: bool): @property def local_remesh(self) -> bool: - """Local remesh of region to be fused. + """Option to remesh the fused region. The default value is true. When local remesh is true, local remeshing of fuse region is performed. **This is a beta parameter**. **The behavior and name may change in the future**. """ @@ -2136,7 +2136,7 @@ def local_remesh(self, value: bool): @property def n_layers(self) -> int: - """Face layers around region to be fused. + """Face layers around region to be fused. The default value is 2. **This is a beta parameter**. **The behavior and name may change in the future**. """ @@ -2148,7 +2148,7 @@ def n_layers(self, value: int): @property def separate(self) -> bool: - """Separate region to be fused. + """Option to separate fused regions. The default value is false. When separate is true, the fuse regions are separated into different zonelets. **This is a beta parameter**. **The behavior and name may change in the future**. 
""" @@ -2160,7 +2160,7 @@ def separate(self, value: bool): @property def angle(self) -> float: - """Faces zonelets with angle less than the provided value are considered for fuse operation. + """Faces zonelets with angle less than the provided value are considered for fuse operation. Default value is 45 degrees. **This is a beta parameter**. **The behavior and name may change in the future**. """ @@ -2172,7 +2172,7 @@ def angle(self, value: float): @property def fuse_edges_only(self) -> bool: - """Fuse edges only. + """Option to fuse edges. The default value is false. When fuse edges only is true, only edges are fused. **This is a beta parameter**. **The behavior and name may change in the future**. """ @@ -2208,6 +2208,10 @@ class FuseResults(CoreObject): intersecting_locations: Iterable[float], optional Locations where the fuse operation did not remove self-intersections in the input. Each location corresponds to a patch of faces where self-intersections exist. The number of elements in intersecting locations are in multiples of three. For example, zero, three, six, nine, and so on. Each triplet corresponds to coordinates in x, y, and z. For example, if the intersecting locations contain (a, b, c, d, e, f), then (a, b, c) represent the first location and (d, e, f) represent the second location. + **This is a beta parameter**. **The behavior and name may change in the future**. + intersecting_zonelet_pairs: List[OverlapPairs], optional + Contains pairs of face zonelet ids where the fuse operation failed to remove self-intersections in the input. + **This is a beta parameter**. **The behavior and name may change in the future**. json_data: dict, optional JSON dictionary to create a ``FuseResults`` object with provided parameters. @@ -2224,12 +2228,14 @@ def __initialize( fused_area: float, error_code: ErrorCode, warning_codes: List[WarningCode], - intersecting_locations: Iterable[float]): + intersecting_locations: Iterable[float], + intersecting_zonelet_pairs: List[OverlapPairs]): self._fused_pairs = fused_pairs self._fused_area = fused_area self._error_code = ErrorCode(error_code) self._warning_codes = warning_codes self._intersecting_locations = intersecting_locations if isinstance(intersecting_locations, np.ndarray) else np.array(intersecting_locations, dtype=np.double) if intersecting_locations is not None else None + self._intersecting_zonelet_pairs = intersecting_zonelet_pairs def __init__( self, @@ -2239,6 +2245,7 @@ def __init__( error_code: ErrorCode = None, warning_codes: List[WarningCode] = None, intersecting_locations: Iterable[float] = None, + intersecting_zonelet_pairs: List[OverlapPairs] = None, json_data : dict = None, **kwargs): """Initialize a ``FuseResults`` object. @@ -2266,6 +2273,10 @@ def __init__( intersecting_locations: Iterable[float], optional Locations where the fuse operation did not remove self-intersections in the input. Each location corresponds to a patch of faces where self-intersections exist. The number of elements in intersecting locations are in multiples of three. For example, zero, three, six, nine, and so on. Each triplet corresponds to coordinates in x, y, and z. For example, if the intersecting locations contain (a, b, c, d, e, f), then (a, b, c) represent the first location and (d, e, f) represent the second location. + **This is a beta parameter**. **The behavior and name may change in the future**. 
+ intersecting_zonelet_pairs: List[OverlapPairs], optional + Contains pairs of face zonelet ids where the fuse operation failed to remove self-intersections in the input. + **This is a beta parameter**. **The behavior and name may change in the future**. json_data: dict, optional JSON dictionary to create a ``FuseResults`` object with provided parameters. @@ -2280,16 +2291,18 @@ def __init__( json_data["fusedArea"] if "fusedArea" in json_data else None, ErrorCode(json_data["errorCode"] if "errorCode" in json_data else None), [WarningCode(data) for data in json_data["warningCodes"]] if "warningCodes" in json_data else None, - json_data["intersectingLocations"] if "intersectingLocations" in json_data else None) + json_data["intersectingLocations"] if "intersectingLocations" in json_data else None, + [OverlapPairs(model = model, json_data = data) for data in json_data["intersectingZoneletPairs"]] if "intersectingZoneletPairs" in json_data else None) else: - all_field_specified = all(arg is not None for arg in [fused_pairs, fused_area, error_code, warning_codes, intersecting_locations]) + all_field_specified = all(arg is not None for arg in [fused_pairs, fused_area, error_code, warning_codes, intersecting_locations, intersecting_zonelet_pairs]) if all_field_specified: self.__initialize( fused_pairs, fused_area, error_code, warning_codes, - intersecting_locations) + intersecting_locations, + intersecting_zonelet_pairs) else: if model is None: raise ValueError("Invalid assignment. Either pass a model or specify all properties.") @@ -2301,7 +2314,8 @@ def __init__( fused_area if fused_area is not None else ( FuseResults._default_params["fused_area"] if "fused_area" in FuseResults._default_params else (json_data["fusedArea"] if "fusedArea" in json_data else None)), error_code if error_code is not None else ( FuseResults._default_params["error_code"] if "error_code" in FuseResults._default_params else ErrorCode(json_data["errorCode"] if "errorCode" in json_data else None)), warning_codes if warning_codes is not None else ( FuseResults._default_params["warning_codes"] if "warning_codes" in FuseResults._default_params else [WarningCode(data) for data in (json_data["warningCodes"] if "warningCodes" in json_data else None)]), - intersecting_locations if intersecting_locations is not None else ( FuseResults._default_params["intersecting_locations"] if "intersecting_locations" in FuseResults._default_params else (json_data["intersectingLocations"] if "intersectingLocations" in json_data else None))) + intersecting_locations if intersecting_locations is not None else ( FuseResults._default_params["intersecting_locations"] if "intersecting_locations" in FuseResults._default_params else (json_data["intersectingLocations"] if "intersectingLocations" in json_data else None)), + intersecting_zonelet_pairs if intersecting_zonelet_pairs is not None else ( FuseResults._default_params["intersecting_zonelet_pairs"] if "intersecting_zonelet_pairs" in FuseResults._default_params else [OverlapPairs(model = model, json_data = data) for data in (json_data["intersectingZoneletPairs"] if "intersectingZoneletPairs" in json_data else None)])) self._custom_params = kwargs if model is not None: [ model._logger.warning(f'Unsupported argument : {key}') for key in kwargs ] @@ -2315,7 +2329,8 @@ def set_default( fused_area: float = None, error_code: ErrorCode = None, warning_codes: List[WarningCode] = None, - intersecting_locations: Iterable[float] = None): + intersecting_locations: Iterable[float] = None, + 
intersecting_zonelet_pairs: List[OverlapPairs] = None): """Set the default values of the ``FuseResults`` object. Parameters @@ -2330,6 +2345,8 @@ def set_default( Warning codes associated with the fuse operation. intersecting_locations: Iterable[float], optional Locations where the fuse operation did not remove self-intersections in the input. Each location corresponds to a patch of faces where self-intersections exist. The number of elements in intersecting locations are in multiples of three. For example, zero, three, six, nine, and so on. Each triplet corresponds to coordinates in x, y, and z. For example, if the intersecting locations contain (a, b, c, d, e, f), then (a, b, c) represent the first location and (d, e, f) represent the second location. + intersecting_zonelet_pairs: List[OverlapPairs], optional + Contains pairs of face zonelet ids where the fuse operation failed to remove self-intersections in the input. """ args = locals() [FuseResults._default_params.update({ key: value }) for key, value in args.items() if value is not None] @@ -2358,11 +2375,13 @@ def _jsonify(self) -> Dict[str, Any]: json_data["warningCodes"] = [data for data in self._warning_codes] if self._intersecting_locations is not None: json_data["intersectingLocations"] = self._intersecting_locations + if self._intersecting_zonelet_pairs is not None: + json_data["intersectingZoneletPairs"] = [data._jsonify() for data in self._intersecting_zonelet_pairs] [ json_data.update({ utils.to_camel_case(key) : value }) for key, value in self._custom_params.items()] return json_data def __str__(self) -> str: - message = "fused_pairs : %s\nfused_area : %s\nerror_code : %s\nwarning_codes : %s\nintersecting_locations : %s" % (self._fused_pairs, self._fused_area, self._error_code, '[' + ''.join('\n' + str(data) for data in self._warning_codes) + ']', self._intersecting_locations) + message = "fused_pairs : %s\nfused_area : %s\nerror_code : %s\nwarning_codes : %s\nintersecting_locations : %s\nintersecting_zonelet_pairs : %s" % (self._fused_pairs, self._fused_area, self._error_code, '[' + ''.join('\n' + str(data) for data in self._warning_codes) + ']', self._intersecting_locations, '[' + ''.join('\n' + str(data) for data in self._intersecting_zonelet_pairs) + ']') message += ''.join('\n' + str(key) + ' : ' + str(value) for key, value in self._custom_params.items()) return message @@ -2425,3 +2444,15 @@ def intersecting_locations(self) -> Iterable[float]: @intersecting_locations.setter def intersecting_locations(self, value: Iterable[float]): self._intersecting_locations = value + + @property + def intersecting_zonelet_pairs(self) -> List[OverlapPairs]: + """Contains pairs of face zonelet ids where the fuse operation failed to remove self-intersections in the input. + + **This is a beta parameter**. **The behavior and name may change in the future**. + """ + return self._intersecting_zonelet_pairs + + @intersecting_zonelet_pairs.setter + def intersecting_zonelet_pairs(self, value: List[OverlapPairs]): + self._intersecting_zonelet_pairs = value diff --git a/src/ansys/meshing/prime/autogen/controlstructs.py b/src/ansys/meshing/prime/autogen/controlstructs.py index fceab98717..1846472c6d 100644 --- a/src/ansys/meshing/prime/autogen/controlstructs.py +++ b/src/ansys/meshing/prime/autogen/controlstructs.py @@ -34,6 +34,26 @@ class ScopeExpressionType(enum.IntEnum): NAMEPATTERN = 2 """Use name pattern expression to evaluate scope.""" +class SweepType(enum.IntEnum): + """Provides the Sweep Mesh Decomposition type. 
+ """ + STANDARD = 1 + """Multizone create mapped and swept blocks if possible (should be default). + + **This is a beta parameter**. **The behavior and name may change in the future**.""" + THINSWEEP = 2 + """Creates swept blocks on thin wall geometries. + + **This is a beta parameter**. **The behavior and name may change in the future**.""" + MEDIALAXIS = 3 + """Creates swept blocks on axisymmetric sweepable geometries. + + **This is a beta parameter**. **The behavior and name may change in the future**.""" + PROGRAMCONTROLLED = 4 + """Multizone chooses the best decomposition method based on geometry analysis and decomposition tries. + + **This is a beta parameter**. **The behavior and name may change in the future**.""" + class ScopeDefinition(CoreObject): """ScopeDefinition to scope entities based on entity and evaluation type. @@ -1572,6 +1592,10 @@ class MultiZoneSweepMeshParams(CoreObject): thin_sweep: bool, optional Thin sweep option set to True will generate sweep mesh in thin volumes by respecting nDivisions. Thin sweep option set to False will generate sweep mesh whose number of divisions in the direction of sweep is determined by sweepMeshSize. + **This is a beta parameter**. **The behavior and name may change in the future**. + sweep_type: SweepType, optional + Option to specify the sweep mesh decomposition type. + **This is a beta parameter**. **The behavior and name may change in the future**. json_data: dict, optional JSON dictionary to create a ``MultiZoneSweepMeshParams`` object with provided parameters. @@ -1587,11 +1611,13 @@ def __initialize( source_and_target_scope: ScopeDefinition, sweep_mesh_size: float, n_divisions: int, - thin_sweep: bool): + thin_sweep: bool, + sweep_type: SweepType): self._source_and_target_scope = source_and_target_scope self._sweep_mesh_size = sweep_mesh_size self._n_divisions = n_divisions self._thin_sweep = thin_sweep + self._sweep_type = SweepType(sweep_type) def __init__( self, @@ -1600,6 +1626,7 @@ def __init__( sweep_mesh_size: float = None, n_divisions: int = None, thin_sweep: bool = None, + sweep_type: SweepType = None, json_data : dict = None, **kwargs): """Initialize a ``MultiZoneSweepMeshParams`` object. @@ -1623,6 +1650,10 @@ def __init__( thin_sweep: bool, optional Thin sweep option set to True will generate sweep mesh in thin volumes by respecting nDivisions. Thin sweep option set to False will generate sweep mesh whose number of divisions in the direction of sweep is determined by sweepMeshSize. + **This is a beta parameter**. **The behavior and name may change in the future**. + sweep_type: SweepType, optional + Option to specify the sweep mesh decomposition type. + **This is a beta parameter**. **The behavior and name may change in the future**. json_data: dict, optional JSON dictionary to create a ``MultiZoneSweepMeshParams`` object with provided parameters. 
@@ -1636,15 +1667,17 @@ def __init__( ScopeDefinition(model = model, json_data = json_data["sourceAndTargetScope"] if "sourceAndTargetScope" in json_data else None), json_data["sweepMeshSize"] if "sweepMeshSize" in json_data else None, json_data["nDivisions"] if "nDivisions" in json_data else None, - json_data["thinSweep"] if "thinSweep" in json_data else None) + json_data["thinSweep"] if "thinSweep" in json_data else None, + SweepType(json_data["sweepType"] if "sweepType" in json_data else None)) else: - all_field_specified = all(arg is not None for arg in [source_and_target_scope, sweep_mesh_size, n_divisions, thin_sweep]) + all_field_specified = all(arg is not None for arg in [source_and_target_scope, sweep_mesh_size, n_divisions, thin_sweep, sweep_type]) if all_field_specified: self.__initialize( source_and_target_scope, sweep_mesh_size, n_divisions, - thin_sweep) + thin_sweep, + sweep_type) else: if model is None: raise ValueError("Invalid assignment. Either pass a model or specify all properties.") @@ -1655,7 +1688,8 @@ def __init__( source_and_target_scope if source_and_target_scope is not None else ( MultiZoneSweepMeshParams._default_params["source_and_target_scope"] if "source_and_target_scope" in MultiZoneSweepMeshParams._default_params else ScopeDefinition(model = model, json_data = (json_data["sourceAndTargetScope"] if "sourceAndTargetScope" in json_data else None))), sweep_mesh_size if sweep_mesh_size is not None else ( MultiZoneSweepMeshParams._default_params["sweep_mesh_size"] if "sweep_mesh_size" in MultiZoneSweepMeshParams._default_params else (json_data["sweepMeshSize"] if "sweepMeshSize" in json_data else None)), n_divisions if n_divisions is not None else ( MultiZoneSweepMeshParams._default_params["n_divisions"] if "n_divisions" in MultiZoneSweepMeshParams._default_params else (json_data["nDivisions"] if "nDivisions" in json_data else None)), - thin_sweep if thin_sweep is not None else ( MultiZoneSweepMeshParams._default_params["thin_sweep"] if "thin_sweep" in MultiZoneSweepMeshParams._default_params else (json_data["thinSweep"] if "thinSweep" in json_data else None))) + thin_sweep if thin_sweep is not None else ( MultiZoneSweepMeshParams._default_params["thin_sweep"] if "thin_sweep" in MultiZoneSweepMeshParams._default_params else (json_data["thinSweep"] if "thinSweep" in json_data else None)), + sweep_type if sweep_type is not None else ( MultiZoneSweepMeshParams._default_params["sweep_type"] if "sweep_type" in MultiZoneSweepMeshParams._default_params else SweepType(json_data["sweepType"] if "sweepType" in json_data else None))) self._custom_params = kwargs if model is not None: [ model._logger.warning(f'Unsupported argument : {key}') for key in kwargs ] @@ -1668,7 +1702,8 @@ def set_default( source_and_target_scope: ScopeDefinition = None, sweep_mesh_size: float = None, n_divisions: int = None, - thin_sweep: bool = None): + thin_sweep: bool = None, + sweep_type: SweepType = None): """Set the default values of the ``MultiZoneSweepMeshParams`` object. Parameters @@ -1681,6 +1716,8 @@ def set_default( Number of divisions in the sweep direction. thin_sweep: bool, optional Thin sweep option set to True will generate sweep mesh in thin volumes by respecting nDivisions. Thin sweep option set to False will generate sweep mesh whose number of divisions in the direction of sweep is determined by sweepMeshSize. + sweep_type: SweepType, optional + Option to specify the sweep mesh decomposition type. 
""" args = locals() [MultiZoneSweepMeshParams._default_params.update({ key: value }) for key, value in args.items() if value is not None] @@ -1707,11 +1744,13 @@ def _jsonify(self) -> Dict[str, Any]: json_data["nDivisions"] = self._n_divisions if self._thin_sweep is not None: json_data["thinSweep"] = self._thin_sweep + if self._sweep_type is not None: + json_data["sweepType"] = self._sweep_type [ json_data.update({ utils.to_camel_case(key) : value }) for key, value in self._custom_params.items()] return json_data def __str__(self) -> str: - message = "source_and_target_scope : %s\nsweep_mesh_size : %s\nn_divisions : %s\nthin_sweep : %s" % ('{ ' + str(self._source_and_target_scope) + ' }', self._sweep_mesh_size, self._n_divisions, self._thin_sweep) + message = "source_and_target_scope : %s\nsweep_mesh_size : %s\nn_divisions : %s\nthin_sweep : %s\nsweep_type : %s" % ('{ ' + str(self._source_and_target_scope) + ' }', self._sweep_mesh_size, self._n_divisions, self._thin_sweep, self._sweep_type) message += ''.join('\n' + str(key) + ' : ' + str(value) for key, value in self._custom_params.items()) return message @@ -1763,6 +1802,18 @@ def thin_sweep(self) -> bool: def thin_sweep(self, value: bool): self._thin_sweep = value + @property + def sweep_type(self) -> SweepType: + """Option to specify the sweep mesh decomposition type. + + **This is a beta parameter**. **The behavior and name may change in the future**. + """ + return self._sweep_type + + @sweep_type.setter + def sweep_type(self, value: SweepType): + self._sweep_type = value + class MultiZoneEdgeBiasingParams(CoreObject): """Defines MultiZone edge biasing control parameters. diff --git a/src/ansys/meshing/prime/autogen/fileio.py b/src/ansys/meshing/prime/autogen/fileio.py index f80608b307..9b2a3ba8cb 100644 --- a/src/ansys/meshing/prime/autogen/fileio.py +++ b/src/ansys/meshing/prime/autogen/fileio.py @@ -113,7 +113,7 @@ def write_pmdat(self, file_name : str, file_write_params : FileWriteParams) -> F return FileWriteResults(model = self._model, json_data = result) def get_abaqus_simulation_data(self, partId : int) -> str: - """ Get Simulation document generated by Abaqus import for a given part. + """ Gets simulation document generated by Abaqus import for a given part. This method will return the JSON Simulation Document for a part if the part exists. Otherwise, it returns an empty string. @@ -190,7 +190,7 @@ def import_abaqus_inp(self, file_name : str, params : ImportAbaqusParams) -> Imp return ImportAbaqusResults(model = self._model, json_data = result) def import_fluent_meshing_size_field(self, file_name : str) -> SizeFieldFileReadResults: - """ Import Fluent-Meshing's sizefield file from disk. + """ Imports Fluent-Meshing's size field file from disk. Fluent-Meshing's sizefield files have sf and sf.gz extension. @@ -511,7 +511,7 @@ def export_fluent_case(self, file_name : str, export_fluent_case_params : Export return FileWriteResults(model = self._model, json_data = result) def export_fluent_meshing_mesh(self, file_name : str, export_fluent_mesh_params : ExportFluentMeshingMeshParams) -> FileWriteResults: - """ Export Fluent Meshing mesh file. Fluent Meshing mesh files have .msh extension. + """ Exports Fluent Meshing mesh file. Fluent Meshing mesh files have .msh extension. 
Parameters @@ -583,7 +583,7 @@ def export_lsdyna_keyword_file(self, file_name : str, export_params : ExportLSDy return ExportLSDynaResults(model = self._model, json_data = result) def export_lsdyna_iga_keyword_file(self, file_name : str, export_params : ExportLSDynaIgaKeywordFileParams) -> FileWriteResults: - """ Export IGA LS-DYNA Keyword file for solid, surface splines, or both. + """ Exports IGA LS-DYNA Keyword file for solid, surface splines, or both. Parameters @@ -621,7 +621,7 @@ def export_lsdyna_iga_keyword_file(self, file_name : str, export_params : Export return FileWriteResults(model = self._model, json_data = result) def export_boundary_fitted_spline_kfile(self, file_name : str, export_params : ExportBoundaryFittedSplineParams) -> FileWriteResults: - """ Export IGA LS-DYNA Keyword file for boundary fitted spline. + """ Exports IGA LS-DYNA Keyword file for boundary fitted spline. Parameters @@ -694,7 +694,7 @@ def import_cad(self, file_name : str, import_cad_params : ImportCadParams) -> Im return ImportCadResults(model = self._model, json_data = result) def export_stl(self, file_name : str, params : ExportSTLParams) -> FileWriteResults: - """ Export STL file. + """ Exports STL file. Parameters diff --git a/src/ansys/meshing/prime/autogen/fileiostructs.py b/src/ansys/meshing/prime/autogen/fileiostructs.py index 33cba5f9fd..96ad457e0b 100644 --- a/src/ansys/meshing/prime/autogen/fileiostructs.py +++ b/src/ansys/meshing/prime/autogen/fileiostructs.py @@ -82,6 +82,26 @@ class CadRefacetingMaxEdgeSizeLimit(enum.IntEnum): RELATIVE = 2 """Denotes relative maximum edge size limit for CAD faceting.""" +class CdbAnalysisType(enum.IntEnum): + """Provides the MAPDL CDB analysis type. + """ + NONE = 0 + """Option to select no analysis type. This is the default option. + + **This is a beta parameter**. **The behavior and name may change in the future**.""" + OUTERPANELSTIFFNESS = 1 + """Option to select Outer Panel Stiffness as CDB analysis type. + + **This is a beta parameter**. **The behavior and name may change in the future**.""" + BELGIAN = 2 + """Option to select Belgian as CDB analysis type. + + **This is a beta parameter**. **The behavior and name may change in the future**.""" + SEATRETRACTOR = 3 + """Option to select Seat Retractor as CDB analysis type. + + **This is a beta parameter**. **The behavior and name may change in the future**.""" + class CdbSimulationType(enum.IntEnum): """Simulation Type for CDB export. """ @@ -1264,7 +1284,7 @@ class CadRefacetingParams(CoreObject): model: Model Model to create a ``CadRefacetingParams`` object with default parameters. cad_faceter: CadFaceter, optional - Specify the available choices for faceter. The available options are Acis, Parasolid. + Specify the available choices for faceter. The available option is Parasolid. (Note: ACIS faceter is being deprecated from 25R1). faceting_resolution: CadRefacetingResolution, optional Set the faceting resolution. custom_surface_deviation_tolerance: float, optional @@ -1317,7 +1337,7 @@ def __init__( model: Model Model to create a ``CadRefacetingParams`` object with default parameters. cad_faceter: CadFaceter, optional - Specify the available choices for faceter. The available options are Acis, Parasolid. + Specify the available choices for faceter. The available option is Parasolid. (Note: ACIS faceter is being deprecated from 25R1). faceting_resolution: CadRefacetingResolution, optional Set the faceting resolution. 
custom_surface_deviation_tolerance: float, optional @@ -1386,7 +1406,7 @@ def set_default( Parameters ---------- cad_faceter: CadFaceter, optional - Specify the available choices for faceter. The available options are Acis, Parasolid. + Specify the available choices for faceter. The available option is Parasolid. (Note: ACIS faceter is being deprecated from 25R1). faceting_resolution: CadRefacetingResolution, optional Set the faceting resolution. custom_surface_deviation_tolerance: float, optional @@ -1437,7 +1457,7 @@ def __str__(self) -> str: @property def cad_faceter(self) -> CadFaceter: - """Specify the available choices for faceter. The available options are Acis, Parasolid. + """Specify the available choices for faceter. The available option is Parasolid. (Note: ACIS faceter is being deprecated from 25R1). """ return self._cad_faceter @@ -1505,7 +1525,7 @@ class ImportCadParams(CoreObject): append: bool, optional Append imported CAD into existing model when true. ansys_release: str, optional - Configures the Ansys release to be used for loading CAD data through non Native route. Supported formats for specifying Ansys release version are '24.2', '242', 'v242', '24R2'. + Configures the Ansys release to be used for loading CAD data through non Native route. Supported formats for specifying Ansys release version are '25.1', '251', 'v251', '25R1'. cad_reader_route: CadReaderRoute, optional Specify the available CAD reader routes. The available CAD reader routes are ProgramControlled, Native, WorkBench, SpaceClaim. part_creation_type: PartCreationType, optional @@ -1583,7 +1603,7 @@ def __init__( append: bool, optional Append imported CAD into existing model when true. ansys_release: str, optional - Configures the Ansys release to be used for loading CAD data through non Native route. Supported formats for specifying Ansys release version are '24.2', '242', 'v242', '24R2'. + Configures the Ansys release to be used for loading CAD data through non Native route. Supported formats for specifying Ansys release version are '25.1', '251', 'v251', '25R1'. cad_reader_route: CadReaderRoute, optional Specify the available CAD reader routes. The available CAD reader routes are ProgramControlled, Native, WorkBench, SpaceClaim. part_creation_type: PartCreationType, optional @@ -1682,7 +1702,7 @@ def set_default( append: bool, optional Append imported CAD into existing model when true. ansys_release: str, optional - Configures the Ansys release to be used for loading CAD data through non Native route. Supported formats for specifying Ansys release version are '24.2', '242', 'v242', '24R2'. + Configures the Ansys release to be used for loading CAD data through non Native route. Supported formats for specifying Ansys release version are '25.1', '251', 'v251', '25R1'. cad_reader_route: CadReaderRoute, optional Specify the available CAD reader routes. The available CAD reader routes are ProgramControlled, Native, WorkBench, SpaceClaim. part_creation_type: PartCreationType, optional @@ -1761,7 +1781,7 @@ def append(self, value: bool): @property def ansys_release(self) -> str: - """Configures the Ansys release to be used for loading CAD data through non Native route. Supported formats for specifying Ansys release version are '24.2', '242', 'v242', '24R2'. + """Configures the Ansys release to be used for loading CAD data through non Native route. Supported formats for specifying Ansys release version are '25.1', '251', 'v251', '25R1'. 
""" return self._ansys_release @@ -2757,6 +2777,219 @@ def error_code(self) -> ErrorCode: def error_code(self, value: ErrorCode): self._error_code = value +class ContactElementTypeParams(CoreObject): + """Parameters to control element type choices for contact surfaces in TIEs and CONTACT PAIRs. + + Parameters + ---------- + model: Model + Model to create a ``ContactElementTypeParams`` object with default parameters. + tie_surf_to_surf: int, optional + Element type for TIE with Surface-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. + tie_node_to_surf: int, optional + Element type for TIE with Node-to-Surface contact where the contact surface is of type ELEMENT. Default value is 175. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. + contact_pair_surf_to_surf: int, optional + Element type for CONTACT PAIR with Surface-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. + contact_pair_node_to_surf: int, optional + Element type for CONTACT PAIR with Node-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. + json_data: dict, optional + JSON dictionary to create a ``ContactElementTypeParams`` object with provided parameters. + + Examples + -------- + >>> contact_element_type_params = prime.ContactElementTypeParams(model = model) + """ + _default_params = {} + + def __initialize( + self, + tie_surf_to_surf: int, + tie_node_to_surf: int, + contact_pair_surf_to_surf: int, + contact_pair_node_to_surf: int): + self._tie_surf_to_surf = tie_surf_to_surf + self._tie_node_to_surf = tie_node_to_surf + self._contact_pair_surf_to_surf = contact_pair_surf_to_surf + self._contact_pair_node_to_surf = contact_pair_node_to_surf + + def __init__( + self, + model: CommunicationManager=None, + tie_surf_to_surf: int = None, + tie_node_to_surf: int = None, + contact_pair_surf_to_surf: int = None, + contact_pair_node_to_surf: int = None, + json_data : dict = None, + **kwargs): + """Initialize a ``ContactElementTypeParams`` object. + + Parameters + ---------- + model: Model + Model to create a ``ContactElementTypeParams`` object with default parameters. + tie_surf_to_surf: int, optional + Element type for TIE with Surface-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. + tie_node_to_surf: int, optional + Element type for TIE with Node-to-Surface contact where the contact surface is of type ELEMENT. Default value is 175. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. + contact_pair_surf_to_surf: int, optional + Element type for CONTACT PAIR with Surface-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. 
+ contact_pair_node_to_surf: int, optional + Element type for CONTACT PAIR with Node-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. + json_data: dict, optional + JSON dictionary to create a ``ContactElementTypeParams`` object with provided parameters. + + Examples + -------- + >>> contact_element_type_params = prime.ContactElementTypeParams(model = model) + """ + if json_data: + self.__initialize( + json_data["tieSurfToSurf"] if "tieSurfToSurf" in json_data else None, + json_data["tieNodeToSurf"] if "tieNodeToSurf" in json_data else None, + json_data["contactPairSurfToSurf"] if "contactPairSurfToSurf" in json_data else None, + json_data["contactPairNodeToSurf"] if "contactPairNodeToSurf" in json_data else None) + else: + all_field_specified = all(arg is not None for arg in [tie_surf_to_surf, tie_node_to_surf, contact_pair_surf_to_surf, contact_pair_node_to_surf]) + if all_field_specified: + self.__initialize( + tie_surf_to_surf, + tie_node_to_surf, + contact_pair_surf_to_surf, + contact_pair_node_to_surf) + else: + if model is None: + raise ValueError("Invalid assignment. Either pass a model or specify all properties.") + else: + param_json = model._communicator.initialize_params(model, "ContactElementTypeParams") + json_data = param_json["ContactElementTypeParams"] if "ContactElementTypeParams" in param_json else {} + self.__initialize( + tie_surf_to_surf if tie_surf_to_surf is not None else ( ContactElementTypeParams._default_params["tie_surf_to_surf"] if "tie_surf_to_surf" in ContactElementTypeParams._default_params else (json_data["tieSurfToSurf"] if "tieSurfToSurf" in json_data else None)), + tie_node_to_surf if tie_node_to_surf is not None else ( ContactElementTypeParams._default_params["tie_node_to_surf"] if "tie_node_to_surf" in ContactElementTypeParams._default_params else (json_data["tieNodeToSurf"] if "tieNodeToSurf" in json_data else None)), + contact_pair_surf_to_surf if contact_pair_surf_to_surf is not None else ( ContactElementTypeParams._default_params["contact_pair_surf_to_surf"] if "contact_pair_surf_to_surf" in ContactElementTypeParams._default_params else (json_data["contactPairSurfToSurf"] if "contactPairSurfToSurf" in json_data else None)), + contact_pair_node_to_surf if contact_pair_node_to_surf is not None else ( ContactElementTypeParams._default_params["contact_pair_node_to_surf"] if "contact_pair_node_to_surf" in ContactElementTypeParams._default_params else (json_data["contactPairNodeToSurf"] if "contactPairNodeToSurf" in json_data else None))) + self._custom_params = kwargs + if model is not None: + [ model._logger.warning(f'Unsupported argument : {key}') for key in kwargs ] + [setattr(type(self), key, property(lambda self, key = key: self._custom_params[key] if key in self._custom_params else None, + lambda self, value, key = key : self._custom_params.update({ key: value }))) for key in kwargs] + self._freeze() + + @staticmethod + def set_default( + tie_surf_to_surf: int = None, + tie_node_to_surf: int = None, + contact_pair_surf_to_surf: int = None, + contact_pair_node_to_surf: int = None): + """Set the default values of the ``ContactElementTypeParams`` object. + + Parameters + ---------- + tie_surf_to_surf: int, optional + Element type for TIE with Surface-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. 
+ tie_node_to_surf: int, optional + Element type for TIE with Node-to-Surface contact where the contact surface is of type ELEMENT. Default value is 175. The choices are 174 and 175. + contact_pair_surf_to_surf: int, optional + Element type for CONTACT PAIR with Surface-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + contact_pair_node_to_surf: int, optional + Element type for CONTACT PAIR with Node-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + """ + args = locals() + [ContactElementTypeParams._default_params.update({ key: value }) for key, value in args.items() if value is not None] + + @staticmethod + def print_default(): + """Print the default values of ``ContactElementTypeParams`` object. + + Examples + -------- + >>> ContactElementTypeParams.print_default() + """ + message = "" + message += ''.join(str(key) + ' : ' + str(value) + '\n' for key, value in ContactElementTypeParams._default_params.items()) + print(message) + + def _jsonify(self) -> Dict[str, Any]: + json_data = {} + if self._tie_surf_to_surf is not None: + json_data["tieSurfToSurf"] = self._tie_surf_to_surf + if self._tie_node_to_surf is not None: + json_data["tieNodeToSurf"] = self._tie_node_to_surf + if self._contact_pair_surf_to_surf is not None: + json_data["contactPairSurfToSurf"] = self._contact_pair_surf_to_surf + if self._contact_pair_node_to_surf is not None: + json_data["contactPairNodeToSurf"] = self._contact_pair_node_to_surf + [ json_data.update({ utils.to_camel_case(key) : value }) for key, value in self._custom_params.items()] + return json_data + + def __str__(self) -> str: + message = "tie_surf_to_surf : %s\ntie_node_to_surf : %s\ncontact_pair_surf_to_surf : %s\ncontact_pair_node_to_surf : %s" % (self._tie_surf_to_surf, self._tie_node_to_surf, self._contact_pair_surf_to_surf, self._contact_pair_node_to_surf) + message += ''.join('\n' + str(key) + ' : ' + str(value) for key, value in self._custom_params.items()) + return message + + @property + def tie_surf_to_surf(self) -> int: + """Element type for TIE with Surface-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. + """ + return self._tie_surf_to_surf + + @tie_surf_to_surf.setter + def tie_surf_to_surf(self, value: int): + self._tie_surf_to_surf = value + + @property + def tie_node_to_surf(self) -> int: + """Element type for TIE with Node-to-Surface contact where the contact surface is of type ELEMENT. Default value is 175. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. + """ + return self._tie_node_to_surf + + @tie_node_to_surf.setter + def tie_node_to_surf(self, value: int): + self._tie_node_to_surf = value + + @property + def contact_pair_surf_to_surf(self) -> int: + """Element type for CONTACT PAIR with Surface-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. 
+ """ + return self._contact_pair_surf_to_surf + + @contact_pair_surf_to_surf.setter + def contact_pair_surf_to_surf(self, value: int): + self._contact_pair_surf_to_surf = value + + @property + def contact_pair_node_to_surf(self) -> int: + """Element type for CONTACT PAIR with Node-to-Surface contact where the contact surface is of type ELEMENT. Default value is 174. The choices are 174 and 175. + + **This is a beta parameter**. **The behavior and name may change in the future**. + """ + return self._contact_pair_node_to_surf + + @contact_pair_node_to_surf.setter + def contact_pair_node_to_surf(self, value: int): + self._contact_pair_node_to_surf = value + class ExportMapdlCdbParams(CoreObject): """Parameters to control MAPDL CDB export settings. @@ -2799,6 +3032,10 @@ class ExportMapdlCdbParams(CoreObject): consider_general_connectors_as_spot_weld: bool, optional Option to translate all general connector joints (other than axial) to spot weld type. This is important when nodes are non coincident. + **This is a beta parameter**. **The behavior and name may change in the future**. + analysis_type: CdbAnalysisType, optional + Option to specify CDB analysis type. + **This is a beta parameter**. **The behavior and name may change in the future**. simulation_type: CdbSimulationType, optional Simulation type for the file. @@ -2807,6 +3044,22 @@ class ExportMapdlCdbParams(CoreObject): analysis_settings_file_name: str, optional File path to export mapdl analysis settings. + **This is a beta parameter**. **The behavior and name may change in the future**. + use_compact_format: bool, optional + Option to enable compact format for the cdb blocks. When true, writes a new, more space-efficient cdb format while exporting. The default value is false. + + **This is a beta parameter**. **The behavior and name may change in the future**. + export_fasteners_as_swgen: bool, optional + Option to export fasteners as swgen. When true, translates fasteners into compact swgen blocks in the exported file. The default value is false. + + **This is a beta parameter**. **The behavior and name may change in the future**. + write_thickness_file: bool, optional + Option to write a thickness file for spotweld fatigue analysis. If true, writes a file named [exportedFilename].cdb.thick.txt containing thickness information. + + **This is a beta parameter**. **The behavior and name may change in the future**. + contact_element_types: ContactElementTypeParams, optional + Parameters for choosing element types for contact surfaces in TIEs and CONTACT PAIRs. + **This is a beta parameter**. **The behavior and name may change in the future**. json_data: dict, optional JSON dictionary to create a ``ExportMapdlCdbParams`` object with provided parameters. 
@@ -2828,8 +3081,13 @@ def __initialize( enable_face_based_labels: bool, write_by_zones: bool, consider_general_connectors_as_spot_weld: bool, + analysis_type: CdbAnalysisType, simulation_type: CdbSimulationType, - analysis_settings_file_name: str): + analysis_settings_file_name: str, + use_compact_format: bool, + export_fasteners_as_swgen: bool, + write_thickness_file: bool, + contact_element_types: ContactElementTypeParams): self._config_settings = config_settings self._pre_solution_settings = pre_solution_settings self._material_properties = material_properties @@ -2839,8 +3097,13 @@ def __initialize( self._enable_face_based_labels = enable_face_based_labels self._write_by_zones = write_by_zones self._consider_general_connectors_as_spot_weld = consider_general_connectors_as_spot_weld + self._analysis_type = CdbAnalysisType(analysis_type) self._simulation_type = CdbSimulationType(simulation_type) self._analysis_settings_file_name = analysis_settings_file_name + self._use_compact_format = use_compact_format + self._export_fasteners_as_swgen = export_fasteners_as_swgen + self._write_thickness_file = write_thickness_file + self._contact_element_types = contact_element_types def __init__( self, @@ -2854,8 +3117,13 @@ def __init__( enable_face_based_labels: bool = None, write_by_zones: bool = None, consider_general_connectors_as_spot_weld: bool = None, + analysis_type: CdbAnalysisType = None, simulation_type: CdbSimulationType = None, analysis_settings_file_name: str = None, + use_compact_format: bool = None, + export_fasteners_as_swgen: bool = None, + write_thickness_file: bool = None, + contact_element_types: ContactElementTypeParams = None, json_data : dict = None, **kwargs): """Initialize a ``ExportMapdlCdbParams`` object. @@ -2899,6 +3167,10 @@ def __init__( consider_general_connectors_as_spot_weld: bool, optional Option to translate all general connector joints (other than axial) to spot weld type. This is important when nodes are non coincident. + **This is a beta parameter**. **The behavior and name may change in the future**. + analysis_type: CdbAnalysisType, optional + Option to specify CDB analysis type. + **This is a beta parameter**. **The behavior and name may change in the future**. simulation_type: CdbSimulationType, optional Simulation type for the file. @@ -2907,6 +3179,22 @@ def __init__( analysis_settings_file_name: str, optional File path to export mapdl analysis settings. + **This is a beta parameter**. **The behavior and name may change in the future**. + use_compact_format: bool, optional + Option to enable compact format for the cdb blocks. When true, writes a new, more space-efficient cdb format while exporting. The default value is false. + + **This is a beta parameter**. **The behavior and name may change in the future**. + export_fasteners_as_swgen: bool, optional + Option to export fasteners as swgen. When true, translates fasteners into compact swgen blocks in the exported file. The default value is false. + + **This is a beta parameter**. **The behavior and name may change in the future**. + write_thickness_file: bool, optional + Option to write a thickness file for spotweld fatigue analysis. If true, writes a file named [exportedFilename].cdb.thick.txt containing thickness information. + + **This is a beta parameter**. **The behavior and name may change in the future**. + contact_element_types: ContactElementTypeParams, optional + Parameters for choosing element types for contact surfaces in TIEs and CONTACT PAIRs. + **This is a beta parameter**. 
**The behavior and name may change in the future**. json_data: dict, optional JSON dictionary to create a ``ExportMapdlCdbParams`` object with provided parameters. @@ -2926,10 +3214,15 @@ def __init__( json_data["enableFaceBasedLabels"] if "enableFaceBasedLabels" in json_data else None, json_data["writeByZones"] if "writeByZones" in json_data else None, json_data["considerGeneralConnectorsAsSpotWeld"] if "considerGeneralConnectorsAsSpotWeld" in json_data else None, + CdbAnalysisType(json_data["analysisType"] if "analysisType" in json_data else None), CdbSimulationType(json_data["simulationType"] if "simulationType" in json_data else None), - json_data["analysisSettingsFileName"] if "analysisSettingsFileName" in json_data else None) + json_data["analysisSettingsFileName"] if "analysisSettingsFileName" in json_data else None, + json_data["useCompactFormat"] if "useCompactFormat" in json_data else None, + json_data["exportFastenersAsSwgen"] if "exportFastenersAsSwgen" in json_data else None, + json_data["writeThicknessFile"] if "writeThicknessFile" in json_data else None, + ContactElementTypeParams(model = model, json_data = json_data["contactElementTypes"] if "contactElementTypes" in json_data else None)) else: - all_field_specified = all(arg is not None for arg in [config_settings, pre_solution_settings, material_properties, boundary_conditions, analysis_settings, write_cells, enable_face_based_labels, write_by_zones, consider_general_connectors_as_spot_weld, simulation_type, analysis_settings_file_name]) + all_field_specified = all(arg is not None for arg in [config_settings, pre_solution_settings, material_properties, boundary_conditions, analysis_settings, write_cells, enable_face_based_labels, write_by_zones, consider_general_connectors_as_spot_weld, analysis_type, simulation_type, analysis_settings_file_name, use_compact_format, export_fasteners_as_swgen, write_thickness_file, contact_element_types]) if all_field_specified: self.__initialize( config_settings, @@ -2941,8 +3234,13 @@ def __init__( enable_face_based_labels, write_by_zones, consider_general_connectors_as_spot_weld, + analysis_type, simulation_type, - analysis_settings_file_name) + analysis_settings_file_name, + use_compact_format, + export_fasteners_as_swgen, + write_thickness_file, + contact_element_types) else: if model is None: raise ValueError("Invalid assignment. 
Either pass a model or specify all properties.") @@ -2959,8 +3257,13 @@ def __init__( enable_face_based_labels if enable_face_based_labels is not None else ( ExportMapdlCdbParams._default_params["enable_face_based_labels"] if "enable_face_based_labels" in ExportMapdlCdbParams._default_params else (json_data["enableFaceBasedLabels"] if "enableFaceBasedLabels" in json_data else None)), write_by_zones if write_by_zones is not None else ( ExportMapdlCdbParams._default_params["write_by_zones"] if "write_by_zones" in ExportMapdlCdbParams._default_params else (json_data["writeByZones"] if "writeByZones" in json_data else None)), consider_general_connectors_as_spot_weld if consider_general_connectors_as_spot_weld is not None else ( ExportMapdlCdbParams._default_params["consider_general_connectors_as_spot_weld"] if "consider_general_connectors_as_spot_weld" in ExportMapdlCdbParams._default_params else (json_data["considerGeneralConnectorsAsSpotWeld"] if "considerGeneralConnectorsAsSpotWeld" in json_data else None)), + analysis_type if analysis_type is not None else ( ExportMapdlCdbParams._default_params["analysis_type"] if "analysis_type" in ExportMapdlCdbParams._default_params else CdbAnalysisType(json_data["analysisType"] if "analysisType" in json_data else None)), simulation_type if simulation_type is not None else ( ExportMapdlCdbParams._default_params["simulation_type"] if "simulation_type" in ExportMapdlCdbParams._default_params else CdbSimulationType(json_data["simulationType"] if "simulationType" in json_data else None)), - analysis_settings_file_name if analysis_settings_file_name is not None else ( ExportMapdlCdbParams._default_params["analysis_settings_file_name"] if "analysis_settings_file_name" in ExportMapdlCdbParams._default_params else (json_data["analysisSettingsFileName"] if "analysisSettingsFileName" in json_data else None))) + analysis_settings_file_name if analysis_settings_file_name is not None else ( ExportMapdlCdbParams._default_params["analysis_settings_file_name"] if "analysis_settings_file_name" in ExportMapdlCdbParams._default_params else (json_data["analysisSettingsFileName"] if "analysisSettingsFileName" in json_data else None)), + use_compact_format if use_compact_format is not None else ( ExportMapdlCdbParams._default_params["use_compact_format"] if "use_compact_format" in ExportMapdlCdbParams._default_params else (json_data["useCompactFormat"] if "useCompactFormat" in json_data else None)), + export_fasteners_as_swgen if export_fasteners_as_swgen is not None else ( ExportMapdlCdbParams._default_params["export_fasteners_as_swgen"] if "export_fasteners_as_swgen" in ExportMapdlCdbParams._default_params else (json_data["exportFastenersAsSwgen"] if "exportFastenersAsSwgen" in json_data else None)), + write_thickness_file if write_thickness_file is not None else ( ExportMapdlCdbParams._default_params["write_thickness_file"] if "write_thickness_file" in ExportMapdlCdbParams._default_params else (json_data["writeThicknessFile"] if "writeThicknessFile" in json_data else None)), + contact_element_types if contact_element_types is not None else ( ExportMapdlCdbParams._default_params["contact_element_types"] if "contact_element_types" in ExportMapdlCdbParams._default_params else ContactElementTypeParams(model = model, json_data = (json_data["contactElementTypes"] if "contactElementTypes" in json_data else None)))) self._custom_params = kwargs if model is not None: [ model._logger.warning(f'Unsupported argument : {key}') for key in kwargs ] @@ -2979,8 +3282,13 @@ def 
set_default( enable_face_based_labels: bool = None, write_by_zones: bool = None, consider_general_connectors_as_spot_weld: bool = None, + analysis_type: CdbAnalysisType = None, simulation_type: CdbSimulationType = None, - analysis_settings_file_name: str = None): + analysis_settings_file_name: str = None, + use_compact_format: bool = None, + export_fasteners_as_swgen: bool = None, + write_thickness_file: bool = None, + contact_element_types: ContactElementTypeParams = None): """Set the default values of the ``ExportMapdlCdbParams`` object. Parameters @@ -3003,10 +3311,20 @@ def set_default( Option to write zones in the file. consider_general_connectors_as_spot_weld: bool, optional Option to translate all general connector joints (other than axial) to spot weld type. This is important when nodes are non coincident. + analysis_type: CdbAnalysisType, optional + Option to specify CDB analysis type. simulation_type: CdbSimulationType, optional Simulation type for the file. analysis_settings_file_name: str, optional File path to export mapdl analysis settings. + use_compact_format: bool, optional + Option to enable compact format for the cdb blocks. When true, writes a new, more space-efficient cdb format while exporting. The default value is false. + export_fasteners_as_swgen: bool, optional + Option to export fasteners as swgen. When true, translates fasteners into compact swgen blocks in the exported file. The default value is false. + write_thickness_file: bool, optional + Option to write a thickness file for spotweld fatigue analysis. If true, writes a file named [exportedFilename].cdb.thick.txt containing thickness information. + contact_element_types: ContactElementTypeParams, optional + Parameters for choosing element types for contact surfaces in TIEs and CONTACT PAIRs. 
""" args = locals() [ExportMapdlCdbParams._default_params.update({ key: value }) for key, value in args.items() if value is not None] @@ -3043,15 +3361,25 @@ def _jsonify(self) -> Dict[str, Any]: json_data["writeByZones"] = self._write_by_zones if self._consider_general_connectors_as_spot_weld is not None: json_data["considerGeneralConnectorsAsSpotWeld"] = self._consider_general_connectors_as_spot_weld + if self._analysis_type is not None: + json_data["analysisType"] = self._analysis_type if self._simulation_type is not None: json_data["simulationType"] = self._simulation_type if self._analysis_settings_file_name is not None: json_data["analysisSettingsFileName"] = self._analysis_settings_file_name + if self._use_compact_format is not None: + json_data["useCompactFormat"] = self._use_compact_format + if self._export_fasteners_as_swgen is not None: + json_data["exportFastenersAsSwgen"] = self._export_fasteners_as_swgen + if self._write_thickness_file is not None: + json_data["writeThicknessFile"] = self._write_thickness_file + if self._contact_element_types is not None: + json_data["contactElementTypes"] = self._contact_element_types._jsonify() [ json_data.update({ utils.to_camel_case(key) : value }) for key, value in self._custom_params.items()] return json_data def __str__(self) -> str: - message = "config_settings : %s\npre_solution_settings : %s\nmaterial_properties : %s\nboundary_conditions : %s\nanalysis_settings : %s\nwrite_cells : %s\nenable_face_based_labels : %s\nwrite_by_zones : %s\nconsider_general_connectors_as_spot_weld : %s\nsimulation_type : %s\nanalysis_settings_file_name : %s" % (self._config_settings, self._pre_solution_settings, self._material_properties, self._boundary_conditions, self._analysis_settings, self._write_cells, self._enable_face_based_labels, self._write_by_zones, self._consider_general_connectors_as_spot_weld, self._simulation_type, self._analysis_settings_file_name) + message = "config_settings : %s\npre_solution_settings : %s\nmaterial_properties : %s\nboundary_conditions : %s\nanalysis_settings : %s\nwrite_cells : %s\nenable_face_based_labels : %s\nwrite_by_zones : %s\nconsider_general_connectors_as_spot_weld : %s\nanalysis_type : %s\nsimulation_type : %s\nanalysis_settings_file_name : %s\nuse_compact_format : %s\nexport_fasteners_as_swgen : %s\nwrite_thickness_file : %s\ncontact_element_types : %s" % (self._config_settings, self._pre_solution_settings, self._material_properties, self._boundary_conditions, self._analysis_settings, self._write_cells, self._enable_face_based_labels, self._write_by_zones, self._consider_general_connectors_as_spot_weld, self._analysis_type, self._simulation_type, self._analysis_settings_file_name, self._use_compact_format, self._export_fasteners_as_swgen, self._write_thickness_file, '{ ' + str(self._contact_element_types) + ' }') message += ''.join('\n' + str(key) + ' : ' + str(value) for key, value in self._custom_params.items()) return message @@ -3163,6 +3491,18 @@ def consider_general_connectors_as_spot_weld(self) -> bool: def consider_general_connectors_as_spot_weld(self, value: bool): self._consider_general_connectors_as_spot_weld = value + @property + def analysis_type(self) -> CdbAnalysisType: + """Option to specify CDB analysis type. + + **This is a beta parameter**. **The behavior and name may change in the future**. 
+ """ + return self._analysis_type + + @analysis_type.setter + def analysis_type(self, value: CdbAnalysisType): + self._analysis_type = value + @property def simulation_type(self) -> CdbSimulationType: """Simulation type for the file. @@ -3187,6 +3527,54 @@ def analysis_settings_file_name(self) -> str: def analysis_settings_file_name(self, value: str): self._analysis_settings_file_name = value + @property + def use_compact_format(self) -> bool: + """Option to enable compact format for the cdb blocks. When true, writes a new, more space-efficient cdb format while exporting. The default value is false. + + **This is a beta parameter**. **The behavior and name may change in the future**. + """ + return self._use_compact_format + + @use_compact_format.setter + def use_compact_format(self, value: bool): + self._use_compact_format = value + + @property + def export_fasteners_as_swgen(self) -> bool: + """Option to export fasteners as swgen. When true, translates fasteners into compact swgen blocks in the exported file. The default value is false. + + **This is a beta parameter**. **The behavior and name may change in the future**. + """ + return self._export_fasteners_as_swgen + + @export_fasteners_as_swgen.setter + def export_fasteners_as_swgen(self, value: bool): + self._export_fasteners_as_swgen = value + + @property + def write_thickness_file(self) -> bool: + """Option to write a thickness file for spotweld fatigue analysis. If true, writes a file named [exportedFilename].cdb.thick.txt containing thickness information. + + **This is a beta parameter**. **The behavior and name may change in the future**. + """ + return self._write_thickness_file + + @write_thickness_file.setter + def write_thickness_file(self, value: bool): + self._write_thickness_file = value + + @property + def contact_element_types(self) -> ContactElementTypeParams: + """Parameters for choosing element types for contact surfaces in TIEs and CONTACT PAIRs. + + **This is a beta parameter**. **The behavior and name may change in the future**. + """ + return self._contact_element_types + + @contact_element_types.setter + def contact_element_types(self, value: ContactElementTypeParams): + self._contact_element_types = value + class ExportMapdlCdbResults(CoreObject): """Results associated with the MAPDL CDB export. @@ -3389,6 +3777,10 @@ class ExportLSDynaKeywordFileParams(CoreObject): compute_spotweld_thickness: bool, optional Option to compute spot weld thickness using shell thickness when set to true. Else, use search radius as thickness. + **This is a beta parameter**. **The behavior and name may change in the future**. + write_thickness_file: bool, optional + Option to write a thickness file for spotweld fatigue analysis. If true, writes a file named [exportedFilename].k.thick.txt containing thickness information. + **This is a beta parameter**. **The behavior and name may change in the future**. json_data: dict, optional JSON dictionary to create a ``ExportLSDynaKeywordFileParams`` object with provided parameters. 
@@ -3405,12 +3797,14 @@ def __initialize( database_keywords: str, output_format: LSDynaFileFormatType, analysis_type: LSDynaAnalysisType, - compute_spotweld_thickness: bool): + compute_spotweld_thickness: bool, + write_thickness_file: bool): self._material_properties = material_properties self._database_keywords = database_keywords self._output_format = LSDynaFileFormatType(output_format) self._analysis_type = LSDynaAnalysisType(analysis_type) self._compute_spotweld_thickness = compute_spotweld_thickness + self._write_thickness_file = write_thickness_file def __init__( self, @@ -3420,6 +3814,7 @@ def __init__( output_format: LSDynaFileFormatType = None, analysis_type: LSDynaAnalysisType = None, compute_spotweld_thickness: bool = None, + write_thickness_file: bool = None, json_data : dict = None, **kwargs): """Initialize a ``ExportLSDynaKeywordFileParams`` object. @@ -3447,6 +3842,10 @@ def __init__( compute_spotweld_thickness: bool, optional Option to compute spot weld thickness using shell thickness when set to true. Else, use search radius as thickness. + **This is a beta parameter**. **The behavior and name may change in the future**. + write_thickness_file: bool, optional + Option to write a thickness file for spotweld fatigue analysis. If true, writes a file named [exportedFilename].k.thick.txt containing thickness information. + **This is a beta parameter**. **The behavior and name may change in the future**. json_data: dict, optional JSON dictionary to create a ``ExportLSDynaKeywordFileParams`` object with provided parameters. @@ -3461,16 +3860,18 @@ def __init__( json_data["databaseKeywords"] if "databaseKeywords" in json_data else None, LSDynaFileFormatType(json_data["outputFormat"] if "outputFormat" in json_data else None), LSDynaAnalysisType(json_data["analysisType"] if "analysisType" in json_data else None), - json_data["computeSpotweldThickness"] if "computeSpotweldThickness" in json_data else None) + json_data["computeSpotweldThickness"] if "computeSpotweldThickness" in json_data else None, + json_data["writeThicknessFile"] if "writeThicknessFile" in json_data else None) else: - all_field_specified = all(arg is not None for arg in [material_properties, database_keywords, output_format, analysis_type, compute_spotweld_thickness]) + all_field_specified = all(arg is not None for arg in [material_properties, database_keywords, output_format, analysis_type, compute_spotweld_thickness, write_thickness_file]) if all_field_specified: self.__initialize( material_properties, database_keywords, output_format, analysis_type, - compute_spotweld_thickness) + compute_spotweld_thickness, + write_thickness_file) else: if model is None: raise ValueError("Invalid assignment. 
Either pass a model or specify all properties.") @@ -3482,7 +3883,8 @@ def __init__( database_keywords if database_keywords is not None else ( ExportLSDynaKeywordFileParams._default_params["database_keywords"] if "database_keywords" in ExportLSDynaKeywordFileParams._default_params else (json_data["databaseKeywords"] if "databaseKeywords" in json_data else None)), output_format if output_format is not None else ( ExportLSDynaKeywordFileParams._default_params["output_format"] if "output_format" in ExportLSDynaKeywordFileParams._default_params else LSDynaFileFormatType(json_data["outputFormat"] if "outputFormat" in json_data else None)), analysis_type if analysis_type is not None else ( ExportLSDynaKeywordFileParams._default_params["analysis_type"] if "analysis_type" in ExportLSDynaKeywordFileParams._default_params else LSDynaAnalysisType(json_data["analysisType"] if "analysisType" in json_data else None)), - compute_spotweld_thickness if compute_spotweld_thickness is not None else ( ExportLSDynaKeywordFileParams._default_params["compute_spotweld_thickness"] if "compute_spotweld_thickness" in ExportLSDynaKeywordFileParams._default_params else (json_data["computeSpotweldThickness"] if "computeSpotweldThickness" in json_data else None))) + compute_spotweld_thickness if compute_spotweld_thickness is not None else ( ExportLSDynaKeywordFileParams._default_params["compute_spotweld_thickness"] if "compute_spotweld_thickness" in ExportLSDynaKeywordFileParams._default_params else (json_data["computeSpotweldThickness"] if "computeSpotweldThickness" in json_data else None)), + write_thickness_file if write_thickness_file is not None else ( ExportLSDynaKeywordFileParams._default_params["write_thickness_file"] if "write_thickness_file" in ExportLSDynaKeywordFileParams._default_params else (json_data["writeThicknessFile"] if "writeThicknessFile" in json_data else None))) self._custom_params = kwargs if model is not None: [ model._logger.warning(f'Unsupported argument : {key}') for key in kwargs ] @@ -3496,7 +3898,8 @@ def set_default( database_keywords: str = None, output_format: LSDynaFileFormatType = None, analysis_type: LSDynaAnalysisType = None, - compute_spotweld_thickness: bool = None): + compute_spotweld_thickness: bool = None, + write_thickness_file: bool = None): """Set the default values of the ``ExportLSDynaKeywordFileParams`` object. Parameters @@ -3511,6 +3914,8 @@ def set_default( Option to specify LS-DYNA analysis type. compute_spotweld_thickness: bool, optional Option to compute spot weld thickness using shell thickness when set to true. Else, use search radius as thickness. + write_thickness_file: bool, optional + Option to write a thickness file for spotweld fatigue analysis. If true, writes a file named [exportedFilename].k.thick.txt containing thickness information. 
""" args = locals() [ExportLSDynaKeywordFileParams._default_params.update({ key: value }) for key, value in args.items() if value is not None] @@ -3539,11 +3944,13 @@ def _jsonify(self) -> Dict[str, Any]: json_data["analysisType"] = self._analysis_type if self._compute_spotweld_thickness is not None: json_data["computeSpotweldThickness"] = self._compute_spotweld_thickness + if self._write_thickness_file is not None: + json_data["writeThicknessFile"] = self._write_thickness_file [ json_data.update({ utils.to_camel_case(key) : value }) for key, value in self._custom_params.items()] return json_data def __str__(self) -> str: - message = "material_properties : %s\ndatabase_keywords : %s\noutput_format : %s\nanalysis_type : %s\ncompute_spotweld_thickness : %s" % (self._material_properties, self._database_keywords, self._output_format, self._analysis_type, self._compute_spotweld_thickness) + message = "material_properties : %s\ndatabase_keywords : %s\noutput_format : %s\nanalysis_type : %s\ncompute_spotweld_thickness : %s\nwrite_thickness_file : %s" % (self._material_properties, self._database_keywords, self._output_format, self._analysis_type, self._compute_spotweld_thickness, self._write_thickness_file) message += ''.join('\n' + str(key) + ' : ' + str(value) for key, value in self._custom_params.items()) return message @@ -3607,6 +4014,18 @@ def compute_spotweld_thickness(self) -> bool: def compute_spotweld_thickness(self, value: bool): self._compute_spotweld_thickness = value + @property + def write_thickness_file(self) -> bool: + """Option to write a thickness file for spotweld fatigue analysis. If true, writes a file named [exportedFilename].k.thick.txt containing thickness information. + + **This is a beta parameter**. **The behavior and name may change in the future**. + """ + return self._write_thickness_file + + @write_thickness_file.setter + def write_thickness_file(self, value: bool): + self._write_thickness_file = value + class ExportLSDynaResults(CoreObject): """Results associated with the LS-DYNA export. diff --git a/src/ansys/meshing/prime/autogen/materialpointmanager.py b/src/ansys/meshing/prime/autogen/materialpointmanager.py index 02f003e052..7a5c0a1ff6 100644 --- a/src/ansys/meshing/prime/autogen/materialpointmanager.py +++ b/src/ansys/meshing/prime/autogen/materialpointmanager.py @@ -6,7 +6,7 @@ from typing import List, Any, Union class MaterialPointManager(CoreObject): - """Provides functions for material point creation, deletion and queries. + """Provide functions for material point creation, deletion and queries. Parameters ---------- @@ -36,11 +36,11 @@ def create_material_point(self, suggested_name : str, coords : Iterable[float], Parameters ---------- suggested_name : str - A name suggestion for material point. + Name suggested for the material point. coords : Iterable[float] Coordinates of material point. params : CreateMaterialPointParams - Parameters to material point. + Parameters used to create material point. Returns ------- diff --git a/src/ansys/meshing/prime/autogen/model.py b/src/ansys/meshing/prime/autogen/model.py index f233bd41bb..8f8bbb7df3 100644 --- a/src/ansys/meshing/prime/autogen/model.py +++ b/src/ansys/meshing/prime/autogen/model.py @@ -73,7 +73,7 @@ def delete_parts(self, part_ids : Iterable[int]) -> DeleteResults: return DeleteResults(model = self, json_data = result) def merge_parts(self, part_ids : Iterable[int], params : MergePartsParams) -> MergePartsResults: - """ Merges given parts into one. + """ Merge given parts into one. 
Parameters @@ -108,7 +108,7 @@ def merge_parts(self, part_ids : Iterable[int], params : MergePartsParams) -> Me return MergePartsResults(model = self, json_data = result) def set_global_sizing_params(self, params : GlobalSizingParams) -> SetSizingResults: - """ Sets the global sizing parameters to initialize surfer parameters and various size control parameters. + """ Set the global sizing parameters to initialize surfer parameters and various size control parameters. Parameters @@ -203,7 +203,7 @@ def activate_volumetric_size_fields(self, size_field_ids : Iterable[int]): self._print_logs_after_command("activate_volumetric_size_fields") def deactivate_volumetric_size_fields(self, size_field_ids : Iterable[int]): - """ Deactivate the sizefields identified by the given sizefield ids. + """ Deactivate the size fields with the given size field ids. Parameters @@ -294,6 +294,29 @@ def get_num_threads(self) -> int: self._print_logs_after_command("get_num_threads") return result + def get_num_compute_nodes(self) -> int: + """ Get the number of compute nodes. + + + Returns + ------- + int + Returns the number of compute nodes. + + + Examples + -------- + >>> model = client.model + >>> num_compute_nodes = model.get_num_compute_nodes() + + """ + args = {} + command_name = "PrimeMesh::Model/GetNumComputeNodes" + self._print_logs_before_command("get_num_compute_nodes", args) + result = self._comm.serve(self, command_name, self._object_id, args=args) + self._print_logs_after_command("get_num_compute_nodes") + return result + def start_distributed_meshing(self): """ Enables distributed meshing mode. @@ -348,7 +371,7 @@ def create_zone(self, suggested_name : str, type : ZoneType) -> CreateZoneResult return CreateZoneResults(model = self, json_data = result) def delete_zone(self, zone_id : int) -> DeleteZoneResults: - """ Deletes zone identified with the given id. + """ Deletes zone with the given id. Parameters @@ -379,7 +402,7 @@ def delete_zone(self, zone_id : int) -> DeleteZoneResults: return DeleteZoneResults(model = self, json_data = result) def get_zone_by_name(self, zone_name : str) -> int: - """ Gets the zone by name. + """ Gets the zone with the provided name. Parameters @@ -421,7 +444,7 @@ def get_zone_name(self, id : int) -> str: Returns ------- str - Return the name. Return empty name if the id is invalid. + Return the zone name. Return empty name if the id is invalid. Examples @@ -441,7 +464,7 @@ def get_zone_name(self, id : int) -> str: return result def set_suggested_zone_name(self, id : int, name : str) -> SetNameResults: - """ Sets the unique name for zone with given id based on the given suggested name. + """ Sets the unique name for zone with given id based on the suggested name. Parameters @@ -476,6 +499,35 @@ def set_suggested_zone_name(self, id : int, name : str) -> SetNameResults: self._print_logs_after_command("set_suggested_zone_name", SetNameResults(model = self, json_data = result)) return SetNameResults(model = self, json_data = result) + def set_working_directory(self, path : str): + """ Set working directory. + + Set working directory to be considered for file i/o when file paths are relative. + + Parameters + ---------- + path : str + Path to the directory. + + Notes + ----- + **This is a beta API**. **The behavior and implementation may change in future**. + + Examples + -------- + >>> model = prime.local_model + >>> zones = model.set_working_directory("C:/input_files") + + """ + if not isinstance(path, str): + raise TypeError("Invalid argument type passed for 'path'. 
Valid argument type is str.") + args = {"path" : path} + command_name = "PrimeMesh::Model/SetWorkingDirectory" + self._print_beta_api_warning("set_working_directory") + self._print_logs_before_command("set_working_directory", args) + self._comm.serve(self, command_name, self._object_id, args=args) + self._print_logs_after_command("set_working_directory") + @property def id(self): """ Get the id of Model.""" diff --git a/src/ansys/meshing/prime/autogen/morpher.py b/src/ansys/meshing/prime/autogen/morpher.py index 259a6f245a..daa1f31360 100644 --- a/src/ansys/meshing/prime/autogen/morpher.py +++ b/src/ansys/meshing/prime/autogen/morpher.py @@ -34,7 +34,7 @@ def __exit__(self, type, value, traceback) : self._comm.serve(self._model, command_name, self._object_id, args={}) def match_morph(self, part_id : int, match_pairs : List[MatchPair], match_morph_params : MatchMorphParams, bc_params : MorphBCParams, solve_params : MorphSolveParams) -> MatchMorphResults: - """ Match source and target zonelets defined by match pairs with prescribed boundary conditions for each pair. Also, solves boundary condition parameters to define uniform surface and volume regions in source neighborhood. + """ Match source and target zonelets defined by match pairs with prescribed boundary conditions for each pair. Also, solve boundary condition parameters to define uniform surface and volume regions in source neighborhood. Parameters diff --git a/src/ansys/meshing/prime/autogen/multizonecontrol.py b/src/ansys/meshing/prime/autogen/multizonecontrol.py index b23f7b87da..408c3edac4 100644 --- a/src/ansys/meshing/prime/autogen/multizonecontrol.py +++ b/src/ansys/meshing/prime/autogen/multizonecontrol.py @@ -87,7 +87,7 @@ def set_surface_scope(self, scope_info : ScopeDefinition): self._model._print_logs_after_command("set_surface_scope") def set_map_mesh_params(self, scope_info : MultiZoneMapMeshParams): - """ Sets the parameters for map meshing in terms of topofaces during MultiZone mesh. + """ Set the parameters for map meshing in terms of topofaces during MultiZone mesh. Parameters @@ -114,7 +114,7 @@ def set_map_mesh_params(self, scope_info : MultiZoneMapMeshParams): self._model._print_logs_after_command("set_map_mesh_params") def set_sweep_mesh_params(self, scope_info : MultiZoneSweepMeshParams): - """ Sets the parameters for sweep meshing in terms of topofaces during MultiZone mesh. + """ Set the parameters for sweep meshing in terms of topofaces during MultiZone mesh. Parameters diff --git a/src/ansys/meshing/prime/autogen/part.py b/src/ansys/meshing/prime/autogen/part.py index e924a79d59..a2ad4cd47b 100644 --- a/src/ansys/meshing/prime/autogen/part.py +++ b/src/ansys/meshing/prime/autogen/part.py @@ -1424,6 +1424,35 @@ def get_labels(self) -> List[str]: self._model._print_logs_after_command("get_labels") return result + def get_labels_on_zonelet(self, zonelet_id : int) -> List[str]: + """ Gets labels associated with zonelet. + + + Parameters + ---------- + zonelet_id : int + Id of zonelet for which label is queried. + + Returns + ------- + List[str] + Returns labels associated with zonelet. + + + Examples + -------- + >>> results = part.get_labels_on_zonelet(zonelet_id) + + """ + if not isinstance(zonelet_id, int): + raise TypeError("Invalid argument type passed for 'zonelet_id'. 
Valid argument type is int.") + args = {"zonelet_id" : zonelet_id} + command_name = "PrimeMesh::Part/GetLabelsOnZonelet" + self._model._print_logs_before_command("get_labels_on_zonelet", args) + result = self._comm.serve(self._model, command_name, self._object_id, args=args) + self._model._print_logs_after_command("get_labels_on_zonelet") + return result + def delete_topo_entities(self, params : DeleteTopoEntitiesParams) -> DeleteTopoEntitiesResults: """ Delete topoentities of part controled by parameters. diff --git a/src/ansys/meshing/prime/autogen/partstructs.py b/src/ansys/meshing/prime/autogen/partstructs.py index 1e065d3476..04ff1a1c19 100644 --- a/src/ansys/meshing/prime/autogen/partstructs.py +++ b/src/ansys/meshing/prime/autogen/partstructs.py @@ -2158,6 +2158,10 @@ class PartSummaryResults(CoreObject): Number of polygonal faces. n_quad_faces: int, optional Number of quadrilateral faces. + n_second_order_tri_faces: int, optional + Number of second order triangular faces. + n_second_order_quad_faces: int, optional + Number of second order quadrilateral faces. n_tet_cells: int, optional Number of tetrahedral cells. n_pyra_cells: int, optional @@ -2168,6 +2172,14 @@ class PartSummaryResults(CoreObject): Number of polyhedral cells. n_hex_cells: int, optional Number of hexahedral cells. + n_second_order_tet_cells: int, optional + Number of second order tetrahedral cells. + n_second_order_pyra_cells: int, optional + Number of second order pyramid cells. + n_second_order_prism_cells: int, optional + Number of second order prism cells. + n_second_order_hex_cells: int, optional + Number of second order hexahedral cells. n_unmeshed_topo_faces: int, optional Number of unmeshed topofaces. json_data: dict, optional @@ -2198,11 +2210,17 @@ def __initialize( n_tri_faces: int, n_poly_faces: int, n_quad_faces: int, + n_second_order_tri_faces: int, + n_second_order_quad_faces: int, n_tet_cells: int, n_pyra_cells: int, n_prism_cells: int, n_poly_cells: int, n_hex_cells: int, + n_second_order_tet_cells: int, + n_second_order_pyra_cells: int, + n_second_order_prism_cells: int, + n_second_order_hex_cells: int, n_unmeshed_topo_faces: int): self._message = message self._n_topo_edges = n_topo_edges @@ -2221,11 +2239,17 @@ def __initialize( self._n_tri_faces = n_tri_faces self._n_poly_faces = n_poly_faces self._n_quad_faces = n_quad_faces + self._n_second_order_tri_faces = n_second_order_tri_faces + self._n_second_order_quad_faces = n_second_order_quad_faces self._n_tet_cells = n_tet_cells self._n_pyra_cells = n_pyra_cells self._n_prism_cells = n_prism_cells self._n_poly_cells = n_poly_cells self._n_hex_cells = n_hex_cells + self._n_second_order_tet_cells = n_second_order_tet_cells + self._n_second_order_pyra_cells = n_second_order_pyra_cells + self._n_second_order_prism_cells = n_second_order_prism_cells + self._n_second_order_hex_cells = n_second_order_hex_cells self._n_unmeshed_topo_faces = n_unmeshed_topo_faces def __init__( @@ -2248,11 +2272,17 @@ def __init__( n_tri_faces: int = None, n_poly_faces: int = None, n_quad_faces: int = None, + n_second_order_tri_faces: int = None, + n_second_order_quad_faces: int = None, n_tet_cells: int = None, n_pyra_cells: int = None, n_prism_cells: int = None, n_poly_cells: int = None, n_hex_cells: int = None, + n_second_order_tet_cells: int = None, + n_second_order_pyra_cells: int = None, + n_second_order_prism_cells: int = None, + n_second_order_hex_cells: int = None, n_unmeshed_topo_faces: int = None, json_data : dict = None, **kwargs): @@ -2296,6 +2326,10 @@ def 
__init__( Number of polygonal faces. n_quad_faces: int, optional Number of quadrilateral faces. + n_second_order_tri_faces: int, optional + Number of second order triangular faces. + n_second_order_quad_faces: int, optional + Number of second order quadrilateral faces. n_tet_cells: int, optional Number of tetrahedral cells. n_pyra_cells: int, optional @@ -2306,6 +2340,14 @@ def __init__( Number of polyhedral cells. n_hex_cells: int, optional Number of hexahedral cells. + n_second_order_tet_cells: int, optional + Number of second order tetrahedral cells. + n_second_order_pyra_cells: int, optional + Number of second order pyramid cells. + n_second_order_prism_cells: int, optional + Number of second order prism cells. + n_second_order_hex_cells: int, optional + Number of second order hexahedral cells. n_unmeshed_topo_faces: int, optional Number of unmeshed topofaces. json_data: dict, optional @@ -2334,14 +2376,20 @@ def __init__( json_data["nTriFaces"] if "nTriFaces" in json_data else None, json_data["nPolyFaces"] if "nPolyFaces" in json_data else None, json_data["nQuadFaces"] if "nQuadFaces" in json_data else None, + json_data["nSecondOrderTriFaces"] if "nSecondOrderTriFaces" in json_data else None, + json_data["nSecondOrderQuadFaces"] if "nSecondOrderQuadFaces" in json_data else None, json_data["nTetCells"] if "nTetCells" in json_data else None, json_data["nPyraCells"] if "nPyraCells" in json_data else None, json_data["nPrismCells"] if "nPrismCells" in json_data else None, json_data["nPolyCells"] if "nPolyCells" in json_data else None, json_data["nHexCells"] if "nHexCells" in json_data else None, + json_data["nSecondOrderTetCells"] if "nSecondOrderTetCells" in json_data else None, + json_data["nSecondOrderPyraCells"] if "nSecondOrderPyraCells" in json_data else None, + json_data["nSecondOrderPrismCells"] if "nSecondOrderPrismCells" in json_data else None, + json_data["nSecondOrderHexCells"] if "nSecondOrderHexCells" in json_data else None, json_data["nUnmeshedTopoFaces"] if "nUnmeshedTopoFaces" in json_data else None) else: - all_field_specified = all(arg is not None for arg in [message, n_topo_edges, n_topo_faces, n_topo_volumes, n_edge_zonelets, n_face_zonelets, n_cell_zonelets, n_edge_zones, n_face_zones, n_volume_zones, n_labels, n_nodes, n_faces, n_cells, n_tri_faces, n_poly_faces, n_quad_faces, n_tet_cells, n_pyra_cells, n_prism_cells, n_poly_cells, n_hex_cells, n_unmeshed_topo_faces]) + all_field_specified = all(arg is not None for arg in [message, n_topo_edges, n_topo_faces, n_topo_volumes, n_edge_zonelets, n_face_zonelets, n_cell_zonelets, n_edge_zones, n_face_zones, n_volume_zones, n_labels, n_nodes, n_faces, n_cells, n_tri_faces, n_poly_faces, n_quad_faces, n_second_order_tri_faces, n_second_order_quad_faces, n_tet_cells, n_pyra_cells, n_prism_cells, n_poly_cells, n_hex_cells, n_second_order_tet_cells, n_second_order_pyra_cells, n_second_order_prism_cells, n_second_order_hex_cells, n_unmeshed_topo_faces]) if all_field_specified: self.__initialize( message, @@ -2361,11 +2409,17 @@ def __init__( n_tri_faces, n_poly_faces, n_quad_faces, + n_second_order_tri_faces, + n_second_order_quad_faces, n_tet_cells, n_pyra_cells, n_prism_cells, n_poly_cells, n_hex_cells, + n_second_order_tet_cells, + n_second_order_pyra_cells, + n_second_order_prism_cells, + n_second_order_hex_cells, n_unmeshed_topo_faces) else: if model is None: @@ -2391,11 +2445,17 @@ def __init__( n_tri_faces if n_tri_faces is not None else ( PartSummaryResults._default_params["n_tri_faces"] if "n_tri_faces" in 
PartSummaryResults._default_params else (json_data["nTriFaces"] if "nTriFaces" in json_data else None)), n_poly_faces if n_poly_faces is not None else ( PartSummaryResults._default_params["n_poly_faces"] if "n_poly_faces" in PartSummaryResults._default_params else (json_data["nPolyFaces"] if "nPolyFaces" in json_data else None)), n_quad_faces if n_quad_faces is not None else ( PartSummaryResults._default_params["n_quad_faces"] if "n_quad_faces" in PartSummaryResults._default_params else (json_data["nQuadFaces"] if "nQuadFaces" in json_data else None)), + n_second_order_tri_faces if n_second_order_tri_faces is not None else ( PartSummaryResults._default_params["n_second_order_tri_faces"] if "n_second_order_tri_faces" in PartSummaryResults._default_params else (json_data["nSecondOrderTriFaces"] if "nSecondOrderTriFaces" in json_data else None)), + n_second_order_quad_faces if n_second_order_quad_faces is not None else ( PartSummaryResults._default_params["n_second_order_quad_faces"] if "n_second_order_quad_faces" in PartSummaryResults._default_params else (json_data["nSecondOrderQuadFaces"] if "nSecondOrderQuadFaces" in json_data else None)), n_tet_cells if n_tet_cells is not None else ( PartSummaryResults._default_params["n_tet_cells"] if "n_tet_cells" in PartSummaryResults._default_params else (json_data["nTetCells"] if "nTetCells" in json_data else None)), n_pyra_cells if n_pyra_cells is not None else ( PartSummaryResults._default_params["n_pyra_cells"] if "n_pyra_cells" in PartSummaryResults._default_params else (json_data["nPyraCells"] if "nPyraCells" in json_data else None)), n_prism_cells if n_prism_cells is not None else ( PartSummaryResults._default_params["n_prism_cells"] if "n_prism_cells" in PartSummaryResults._default_params else (json_data["nPrismCells"] if "nPrismCells" in json_data else None)), n_poly_cells if n_poly_cells is not None else ( PartSummaryResults._default_params["n_poly_cells"] if "n_poly_cells" in PartSummaryResults._default_params else (json_data["nPolyCells"] if "nPolyCells" in json_data else None)), n_hex_cells if n_hex_cells is not None else ( PartSummaryResults._default_params["n_hex_cells"] if "n_hex_cells" in PartSummaryResults._default_params else (json_data["nHexCells"] if "nHexCells" in json_data else None)), + n_second_order_tet_cells if n_second_order_tet_cells is not None else ( PartSummaryResults._default_params["n_second_order_tet_cells"] if "n_second_order_tet_cells" in PartSummaryResults._default_params else (json_data["nSecondOrderTetCells"] if "nSecondOrderTetCells" in json_data else None)), + n_second_order_pyra_cells if n_second_order_pyra_cells is not None else ( PartSummaryResults._default_params["n_second_order_pyra_cells"] if "n_second_order_pyra_cells" in PartSummaryResults._default_params else (json_data["nSecondOrderPyraCells"] if "nSecondOrderPyraCells" in json_data else None)), + n_second_order_prism_cells if n_second_order_prism_cells is not None else ( PartSummaryResults._default_params["n_second_order_prism_cells"] if "n_second_order_prism_cells" in PartSummaryResults._default_params else (json_data["nSecondOrderPrismCells"] if "nSecondOrderPrismCells" in json_data else None)), + n_second_order_hex_cells if n_second_order_hex_cells is not None else ( PartSummaryResults._default_params["n_second_order_hex_cells"] if "n_second_order_hex_cells" in PartSummaryResults._default_params else (json_data["nSecondOrderHexCells"] if "nSecondOrderHexCells" in json_data else None)), n_unmeshed_topo_faces if n_unmeshed_topo_faces is not None 
else ( PartSummaryResults._default_params["n_unmeshed_topo_faces"] if "n_unmeshed_topo_faces" in PartSummaryResults._default_params else (json_data["nUnmeshedTopoFaces"] if "nUnmeshedTopoFaces" in json_data else None))) self._custom_params = kwargs if model is not None: @@ -2423,11 +2483,17 @@ def set_default( n_tri_faces: int = None, n_poly_faces: int = None, n_quad_faces: int = None, + n_second_order_tri_faces: int = None, + n_second_order_quad_faces: int = None, n_tet_cells: int = None, n_pyra_cells: int = None, n_prism_cells: int = None, n_poly_cells: int = None, n_hex_cells: int = None, + n_second_order_tet_cells: int = None, + n_second_order_pyra_cells: int = None, + n_second_order_prism_cells: int = None, + n_second_order_hex_cells: int = None, n_unmeshed_topo_faces: int = None): """Set the default values of the ``PartSummaryResults`` object. @@ -2467,6 +2533,10 @@ def set_default( Number of polygonal faces. n_quad_faces: int, optional Number of quadrilateral faces. + n_second_order_tri_faces: int, optional + Number of second order triangular faces. + n_second_order_quad_faces: int, optional + Number of second order quadrilateral faces. n_tet_cells: int, optional Number of tetrahedral cells. n_pyra_cells: int, optional @@ -2477,6 +2547,14 @@ def set_default( Number of polyhedral cells. n_hex_cells: int, optional Number of hexahedral cells. + n_second_order_tet_cells: int, optional + Number of second order tetrahedral cells. + n_second_order_pyra_cells: int, optional + Number of second order pyramid cells. + n_second_order_prism_cells: int, optional + Number of second order prism cells. + n_second_order_hex_cells: int, optional + Number of second order hexahedral cells. n_unmeshed_topo_faces: int, optional Number of unmeshed topofaces. """ @@ -2531,6 +2609,10 @@ def _jsonify(self) -> Dict[str, Any]: json_data["nPolyFaces"] = self._n_poly_faces if self._n_quad_faces is not None: json_data["nQuadFaces"] = self._n_quad_faces + if self._n_second_order_tri_faces is not None: + json_data["nSecondOrderTriFaces"] = self._n_second_order_tri_faces + if self._n_second_order_quad_faces is not None: + json_data["nSecondOrderQuadFaces"] = self._n_second_order_quad_faces if self._n_tet_cells is not None: json_data["nTetCells"] = self._n_tet_cells if self._n_pyra_cells is not None: @@ -2541,13 +2623,21 @@ def _jsonify(self) -> Dict[str, Any]: json_data["nPolyCells"] = self._n_poly_cells if self._n_hex_cells is not None: json_data["nHexCells"] = self._n_hex_cells + if self._n_second_order_tet_cells is not None: + json_data["nSecondOrderTetCells"] = self._n_second_order_tet_cells + if self._n_second_order_pyra_cells is not None: + json_data["nSecondOrderPyraCells"] = self._n_second_order_pyra_cells + if self._n_second_order_prism_cells is not None: + json_data["nSecondOrderPrismCells"] = self._n_second_order_prism_cells + if self._n_second_order_hex_cells is not None: + json_data["nSecondOrderHexCells"] = self._n_second_order_hex_cells if self._n_unmeshed_topo_faces is not None: json_data["nUnmeshedTopoFaces"] = self._n_unmeshed_topo_faces [ json_data.update({ utils.to_camel_case(key) : value }) for key, value in self._custom_params.items()] return json_data def __str__(self) -> str: - message = "message : %s\nn_topo_edges : %s\nn_topo_faces : %s\nn_topo_volumes : %s\nn_edge_zonelets : %s\nn_face_zonelets : %s\nn_cell_zonelets : %s\nn_edge_zones : %s\nn_face_zones : %s\nn_volume_zones : %s\nn_labels : %s\nn_nodes : %s\nn_faces : %s\nn_cells : %s\nn_tri_faces : %s\nn_poly_faces : %s\nn_quad_faces : 
%s\nn_tet_cells : %s\nn_pyra_cells : %s\nn_prism_cells : %s\nn_poly_cells : %s\nn_hex_cells : %s\nn_unmeshed_topo_faces : %s" % (self._message, self._n_topo_edges, self._n_topo_faces, self._n_topo_volumes, self._n_edge_zonelets, self._n_face_zonelets, self._n_cell_zonelets, self._n_edge_zones, self._n_face_zones, self._n_volume_zones, self._n_labels, self._n_nodes, self._n_faces, self._n_cells, self._n_tri_faces, self._n_poly_faces, self._n_quad_faces, self._n_tet_cells, self._n_pyra_cells, self._n_prism_cells, self._n_poly_cells, self._n_hex_cells, self._n_unmeshed_topo_faces) + message = "message : %s\nn_topo_edges : %s\nn_topo_faces : %s\nn_topo_volumes : %s\nn_edge_zonelets : %s\nn_face_zonelets : %s\nn_cell_zonelets : %s\nn_edge_zones : %s\nn_face_zones : %s\nn_volume_zones : %s\nn_labels : %s\nn_nodes : %s\nn_faces : %s\nn_cells : %s\nn_tri_faces : %s\nn_poly_faces : %s\nn_quad_faces : %s\nn_second_order_tri_faces : %s\nn_second_order_quad_faces : %s\nn_tet_cells : %s\nn_pyra_cells : %s\nn_prism_cells : %s\nn_poly_cells : %s\nn_hex_cells : %s\nn_second_order_tet_cells : %s\nn_second_order_pyra_cells : %s\nn_second_order_prism_cells : %s\nn_second_order_hex_cells : %s\nn_unmeshed_topo_faces : %s" % (self._message, self._n_topo_edges, self._n_topo_faces, self._n_topo_volumes, self._n_edge_zonelets, self._n_face_zonelets, self._n_cell_zonelets, self._n_edge_zones, self._n_face_zones, self._n_volume_zones, self._n_labels, self._n_nodes, self._n_faces, self._n_cells, self._n_tri_faces, self._n_poly_faces, self._n_quad_faces, self._n_second_order_tri_faces, self._n_second_order_quad_faces, self._n_tet_cells, self._n_pyra_cells, self._n_prism_cells, self._n_poly_cells, self._n_hex_cells, self._n_second_order_tet_cells, self._n_second_order_pyra_cells, self._n_second_order_prism_cells, self._n_second_order_hex_cells, self._n_unmeshed_topo_faces) message += ''.join('\n' + str(key) + ' : ' + str(value) for key, value in self._custom_params.items()) return message @@ -2721,6 +2811,26 @@ def n_quad_faces(self) -> int: def n_quad_faces(self, value: int): self._n_quad_faces = value + @property + def n_second_order_tri_faces(self) -> int: + """Number of second order triangular faces. + """ + return self._n_second_order_tri_faces + + @n_second_order_tri_faces.setter + def n_second_order_tri_faces(self, value: int): + self._n_second_order_tri_faces = value + + @property + def n_second_order_quad_faces(self) -> int: + """Number of second order quadrilateral faces. + """ + return self._n_second_order_quad_faces + + @n_second_order_quad_faces.setter + def n_second_order_quad_faces(self, value: int): + self._n_second_order_quad_faces = value + @property def n_tet_cells(self) -> int: """Number of tetrahedral cells. @@ -2771,6 +2881,46 @@ def n_hex_cells(self) -> int: def n_hex_cells(self, value: int): self._n_hex_cells = value + @property + def n_second_order_tet_cells(self) -> int: + """Number of second order tetrahedral cells. + """ + return self._n_second_order_tet_cells + + @n_second_order_tet_cells.setter + def n_second_order_tet_cells(self, value: int): + self._n_second_order_tet_cells = value + + @property + def n_second_order_pyra_cells(self) -> int: + """Number of second order pyramid cells. + """ + return self._n_second_order_pyra_cells + + @n_second_order_pyra_cells.setter + def n_second_order_pyra_cells(self, value: int): + self._n_second_order_pyra_cells = value + + @property + def n_second_order_prism_cells(self) -> int: + """Number of second order prism cells. 
+ """ + return self._n_second_order_prism_cells + + @n_second_order_prism_cells.setter + def n_second_order_prism_cells(self, value: int): + self._n_second_order_prism_cells = value + + @property + def n_second_order_hex_cells(self) -> int: + """Number of second order hexahedral cells. + """ + return self._n_second_order_hex_cells + + @n_second_order_hex_cells.setter + def n_second_order_hex_cells(self, value: int): + self._n_second_order_hex_cells = value + @property def n_unmeshed_topo_faces(self) -> int: """Number of unmeshed topofaces. diff --git a/src/ansys/meshing/prime/autogen/primeconfig.py b/src/ansys/meshing/prime/autogen/primeconfig.py index b3e873f2e5..52090ce9d3 100644 --- a/src/ansys/meshing/prime/autogen/primeconfig.py +++ b/src/ansys/meshing/prime/autogen/primeconfig.py @@ -36,6 +36,8 @@ class ErrorCode(enum.IntEnum): """Auto sizing must be of volumetric type.""" SURFERNONMANIFOLDEDGE = 14 """Non manifold edge for meshing.""" + SURFEROPENINITIALFRONTLOOP = 16 + """Open initial front loop for meshing.""" FREEZEMESHERROR = 30 """Cannot remesh freezed mesh.""" REMESHFACEZONELETSNOTSUPPORTEDFORTOPOLOGYPART = 31 @@ -298,6 +300,10 @@ class ErrorCode(enum.IntEnum): """Invalid tolerance value specified.""" SOURCEORTARGETNOTSPECIFIED = 857 """No target or source faces specified.""" + STITCHWITHPRESERVEDENTITIESFAILED = 858 + """Stitch with preserved entities failed.""" + STITCHENTITIESFAILED = 859 + """Stitch entities failed.""" NOTSUPPORTEDFORTOPOLOGYPART = 1200 """Not supported for part with topology data.""" NOTSUPPORTEDFORHIGHERORDERMESHPART = 1201 @@ -488,6 +494,10 @@ class ErrorCode(enum.IntEnum): """Material point with the given name does not exist.""" MATERIALPOINTWITHGIVENIDDOESNTEXIST = 3302 """Material point with the given ID already exists.""" + OCTREELIMITREACHED = 3350 + """Limit reached for the number of octants supported. + + **This is a beta parameter**. **The behavior and name may change in the future**.""" WRAPPERGLOBALSETTINGSNOTSET = 3400 """Global settings for wrapper not set.""" WRAPPERRESOLVEINTERSECTIONFAILED = 3401 @@ -562,6 +572,32 @@ class ErrorCode(enum.IntEnum): """Base size specified for patching should be positive double. **This is a beta parameter**. 
**The behavior and name may change in the future**.""" + VT_INVALIDINPUT = 3600 + """Invalid input provided for VT operation.""" + VT_MERGEFACESFAILED = 3601 + """Merge faces operation failed.""" + VT_MERGETHINSTRIPESFAILED = 3602 + """Merge thin stripes operation failed.""" + VT_MERGETHINEXTFAILED = 3603 + """Merge thin extensions operation failed.""" + VT_REPAIRSHARPCORNERANGLESFAILED = 3604 + """Repair sharp corner angles operation failed.""" + VT_PINCHFACESFAILED = 3605 + """Pinch faces operation failed.""" + VT_FILLHOLEFAILED = 3606 + """Fill hole operation failed.""" + VT_FILLANNULARHOLEFAILED = 3607 + """Fill annular hole operation failed.""" + VT_COLLAPSESHORTEDGESFAILED = 3608 + """Collapse short edges operation failed.""" + VT_SEPARATEFACESFAILED = 3609 + """Separate faces operation failed.""" + VT_CREATELEADINGEDGEFAILED = 3610 + """Create leading edge operation failed.""" + VT_CREATEMIDEDGEFAILED = 3611 + """Create mid edge operation failed.""" + VT_OPERATIONFAILED = 3612 + """VT operation failed.""" CELLSEPARATIONFAILED = 6000 """Cell separation failed.""" NOCELLSSEPARATED = 6001 @@ -650,6 +686,12 @@ class ErrorCode(enum.IntEnum): """Wrong API call sequence.""" MICROSTRUCTUREBADSHAPEPROPERTIES = 13003 """Bad shape properties.""" + MICROSTRUCTURESMOOTHNOTSUPPORTED = 13004 + """Smoothing operation is not supported.""" + MICROSTRUCTUREREMESHNOTSUPPORTED = 13005 + """Surface remesh operation is not supported.""" + MICROSTRUCTUREQUADRATICHEXREQUIREDQUADRATICVOXELGRID = 13006 + """Volume mesh generation for hexahedra requires generation of a quadratic voxel grid.""" AUTOQUADMESHER_NEGATIVEINPUTPARAMETER = 15000 """Autoquadmesher error codes. @@ -692,6 +734,8 @@ class WarningCode(enum.IntEnum): """Overriding BOI sizing parameters.""" OVERRIDEMESHEDSIZINGPARAMS = 206 """Overriding meshed sizing parameters.""" + INVALIDSIZECONTROLSCOPE = 208 + """Invalid size control type provided.""" OVERRIDESUGGESTEDNAME = 301 """Override name by suggested name.""" OVERRIDESURFACESCOPEENTITY = 401 @@ -770,5 +814,15 @@ class WarningCode(enum.IntEnum): """Writing of contact pairs skipped.""" WRITINGTIESSKIPPED = 11103 """Writing of ties skipped.""" + VT_SKIPPEDPROTECTEDENTITIES = 100001 + """Input contains protected entities which have been skipped.""" + VT_SKIPPEDFEATUREENTITIES = 100002 + """Input contains feature entities which have been skipped.""" + VT_SKIPPEDFREEEDGES = 100003 + """Input contains free edges which have been skipped.""" + VT_SKIPPEDNONMANIFOLDEDGES = 100004 + """Input contains non-manifold edges which have been skipped.""" + VT_SKIPPEDENTITIESINDIFFERENTZONES = 100005 + """Input contains entities in different zones which have been skipped.""" MULTIZONEMESHER_SURFACESCOPEVOLUMESCOPEINCONSISTENCY = 110001 """MultiZone warning codes""" diff --git a/src/ansys/meshing/prime/core/dynaexportutils.py b/src/ansys/meshing/prime/core/dynaexportutils.py index c69558ed17..79e8584e5d 100644 --- a/src/ansys/meshing/prime/core/dynaexportutils.py +++ b/src/ansys/meshing/prime/core/dynaexportutils.py @@ -27,8 +27,6 @@ # please move the code to cpp or improve the code quality ########################### [TODO] ################################## -import json - import ansys.meshing.prime as prime __all__ = ['MaterialProcessor', 'DatabaseProcessor'] @@ -1951,7 +1949,7 @@ def damage_initiation(self, mat_id, mat_name, all_mat_props): if "Strain rate" in dmg_ini_props["Data"]: self._strn_rate = dmg_ini_props["Data"]["Strain rate"][0] - def get_all_commands(self, mat_id, mat_name, all_mat_props): + def 
get_all_commands(self, mat_id, mat_name, all_mat_props, material_used_with_shell, max_id): mat_string = "" mat_string += "*MAT_ADD_DAMAGE_DIEM_TITLE\n" mat_string += f"{mat_name}\n" @@ -1987,12 +1985,17 @@ def get_all_commands(self, mat_id, mat_name, all_mat_props): mat_string += ( "".join([self._formatter.field_float(i) for i in self._data_line["D2_DATA"]]) + "\n" ) - mat_string += curve_card if self._damage_length == 0.0 and self._strain_at_dmg_ini == 0.0 and self._strs_tria == 0.0: return "" else: - return mat_string + str_2d = "" + if material_used_with_shell: + str_2d = mat_string.replace( + self._formatter.field_int(mat_id), + self._formatter.field_int(max_id * 3 + mat_id), + ) + return mat_string + str_2d + curve_card class _MatAddInelasticity: @@ -2184,10 +2187,8 @@ class MaterialProcessor: ---------- model : prime.Model Model that the methods are to work on. - raw_materials_data : dict - The raw materials data in json dictionary format extracted from the part's simulation data. - zone_data : dict - The zone data in json dictionary format extracted from the part's simulation data. + sim_data : dict + The simulation data in json dictionary format containing materials and zone information. card_format : str, optional The LS-DYNA card format for writing. Defaults to "SHORT". @@ -2198,6 +2199,7 @@ class MaterialProcessor: __slots__ = ( '_card_format', + '_sim_data', '_raw_materials_data', '_zone_data', '_mat_id', @@ -2214,14 +2216,14 @@ class MaterialProcessor: def __init__( self, model: prime.Model, - raw_materials_data: dict, - zone_data: dict, + sim_data: dict, card_format: str = "SHORT", ): """Initialize class variables and the superclass.""" self._card_format = card_format - self._raw_materials_data = raw_materials_data - self._zone_data = zone_data + self._sim_data = sim_data + self._raw_materials_data = sim_data['Materials'] + self._zone_data = sim_data['Zones'] self._mat_id = 0 self._wt_factor = 1.0 self._material_assignedTo_zones = {} @@ -2338,8 +2340,7 @@ def get_material_commands_by_material_id(self, id: int) -> str: def _get_max_id(self): part = self._model.parts[0] fileio = prime.FileIO(self._model) - simulation_data = json.loads(fileio.get_abaqus_simulation_data(part.id)) - max_id = simulation_data['SimulationData']['max_id'] + max_id = self._sim_data['SimulationData']['max_id'] return int(max_id) def _get_zone_with_id(self, _id): @@ -2581,7 +2582,11 @@ def _get_mat_comands(self, mat_name): self._mat_id, mat_name, mat_data, e, pr, density=density ) if damage_in_mat and is_cohesive is False: - dyna_mat_card += damage.get_all_commands(self._mat_id, mat_name, mat_data) + material_used_with_shell = self._is_material_used_with_shell(mat_name) + max_id = self._get_max_id() + dyna_mat_card += damage.get_all_commands( + self._mat_id, mat_name, mat_data, material_used_with_shell, max_id + ) if damage_in_mat and is_cohesive: dyna_mat_card += damage_cohesive.get_all_commands( self._mat_id, @@ -2682,7 +2687,7 @@ def _process_elastic_modulus(self, mat_props, mat_name, mat_id, density): property_dict = mat_props['ELASTIC'] mat_elastic_card = '' elastic_type = property_dict["Parameters"]["TYPE"] - if elastic_type == "ISOTROPIC": + if elastic_type == "ISOTROPIC" or elastic_type == "ISO": # self._logger.warning( # f"Only isotropic elastic modulus is processed, " # f"Elastic Modulus for the material {material} " @@ -2782,7 +2787,7 @@ def _process_elastic_modulus(self, mat_props, mat_name, mat_id, density): def _get_elastic_modulus(self, mat_props, mat_name, mat_id): property_dict = 
mat_props['ELASTIC'] elastic_type = property_dict["Parameters"]["TYPE"] - if elastic_type == "ISOTROPIC": + if elastic_type == "ISOTROPIC" or elastic_type == "ISO": # self._logger.warning(f"Only isotropic elastic modulus is processed, " # f"Elastic Modulus for the material {material} " # f"is not processed.") diff --git a/src/ansys/meshing/prime/core/fileio.py b/src/ansys/meshing/prime/core/fileio.py index 35be9d1887..e5ff7fff10 100644 --- a/src/ansys/meshing/prime/core/fileio.py +++ b/src/ansys/meshing/prime/core/fileio.py @@ -336,6 +336,62 @@ def export_mapdl_cdb( result = super().export_mapdl_cdb(temp_file_name, params) return result + def initialize_cdb_export_params( + self, params: ExportMapdlCdbParams, major_version: int, minor_version: int + ) -> ExportMapdlCdbParams: + """ + Initialize specific CDB export parameters based on the given version. + + This function sets the use_compact_format and export_fasteners_as_swgen + parameters of the provided ExportMapdlCdbParams object based on the given + major and minor version numbers. Other parameters remain unchanged. + + Parameters + ---------- + params : ExportMapdlCdbParams + The CDB export parameters object to be modified. + major_version : int + The major version number. + minor_version : int + The minor version number. + + Returns + ------- + ExportMapdlCdbParams + The modified CDB export parameters object. + + Notes + ----- + **This is a beta API**. **The behavior and implementation may change in future**. + + The version is formed as "r", e.g., "24r1", "25r2". + If the version is greater than or equal to "25r1", the use_compact_format and + export_fasteners_as_swgen parameters are set to True; otherwise, they are set + to False. + + Examples + -------- + >>> file_io = prime.FileIO(model=model) + >>> params = prime.ExportMapdlCdbParams() + >>> params = file_io.initialize_cdb_export_params(params, 24, 1) + >>> params.use_compact_format + False + >>> params.export_fasteners_as_swgen + False + + >>> file_io = prime.FileIO(model=model) + >>> params = prime.ExportMapdlCdbParams() + >>> params = file_io.initialize_cdb_export_params(params, 25, 2) + >>> params.use_compact_format + True + >>> params.export_fasteners_as_swgen + True + """ + version = f"{major_version}r{minor_version}" + params.use_compact_format = version >= "25r1" + params.export_fasteners_as_swgen = version >= "25r1" + return params + def import_fluent_meshing_meshes( self, file_names: List[str], @@ -501,14 +557,9 @@ def export_lsdyna_keyword_file( part_id = self._model.parts[0].id sim_data = json.loads(super().get_abaqus_simulation_data(part_id)) if sim_data is not None: - mp = dynaexportutils.MaterialProcessor( - self._model, sim_data["Materials"], sim_data["Zones"] - ) + mp = dynaexportutils.MaterialProcessor(self._model, sim_data) all_mat_cmds = mp.get_all_material_commands() params.material_properties = all_mat_cmds + params.material_properties - dp = dynaexportutils.DatabaseProcessor(self._model, sim_data["Step"]) - all_data_cmds = dp.get_output_database_keywords() - params.database_keywords = all_data_cmds + params.database_keywords result = super().export_lsdyna_keyword_file(temp_file_name, params) return result diff --git a/src/ansys/meshing/prime/core/mapdlcdbexportutils.py b/src/ansys/meshing/prime/core/mapdlcdbexportutils.py index 6ddb7274e9..3f27465044 100644 --- a/src/ansys/meshing/prime/core/mapdlcdbexportutils.py +++ b/src/ansys/meshing/prime/core/mapdlcdbexportutils.py @@ -30,10 +30,11 @@ import json import math import os +import re from typing import 
Tuple import ansys.meshing.prime as prime -from ansys.meshing.prime.params.primestructs import ExportMapdlCdbParams +from ansys.meshing.prime.autogen.fileiostructs import ExportMapdlCdbParams __all__ = ['generate_mapdl_commands'] @@ -163,8 +164,9 @@ def _get_commands(self, time_pt_table_name, time_pt_table_data): for i in range(0, len(all_values), 4): values = all_values[i : i + 4] format_output += ''.join(values) + '\n' - mapdl_commands += f"*DIM, {time_pt_table_name}, ARRAY, {len(pt_data)}, 1, 1,\n" - mapdl_commands += f"*PREAD, {time_pt_table_name}, {len(pt_data)}\n" + tpname = get_modified_component_name(time_pt_table_name) + mapdl_commands += f"*DIM, {tpname}, ARRAY, {len(pt_data)}, 1, 1,\n" + mapdl_commands += f"*PREAD, {tpname}, {len(pt_data)}\n" mapdl_commands += format_output mapdl_commands += "END PREAD\n" return mapdl_commands @@ -677,6 +679,8 @@ def __init__(self, model: prime.Model, raw_materials_data, zone_data): 'HYPERELASTIC': self._process_hyperelastic_data, 'DAMPING': self._process_damping_data, 'EXPANSION': self._process_expansion_data, + 'DAMAGE EVOLUTION': self._process_damage_evolution_data, + 'DAMAGE INITIATION': self._process_damage_initiation_data, } self._model = model self._logger = model.python_logger @@ -752,6 +756,8 @@ def _get_mat_comands(self, material): "HYPERELASTIC", "DAMPING", "EXPANSION", + 'DAMAGE INITIATION', + 'DAMAGE EVOLUTION', ] # self._logger.info(mat_data) mapdl_text_data = f"! material '{material}' \n" @@ -791,6 +797,132 @@ def get_material_commands_by_material_id(self, id): break return mapdl_text_data + def _process_damage_initiation_data(self, property_dict, material, mat_id): + + damage_init_data = '' + + data = [] + parameters = [] + + if self._material_linked_to_zone_type[material] == 'Cohesive': + return '' + + if 'Parameters' in property_dict and property_dict['Parameters'] is not None: + parameters = property_dict['Parameters'] + if 'Data' in property_dict and property_dict['Data'] is not None: + data = property_dict['Data'] + if 'DEPENDENCIES' in parameters: + self._logger.warning( + f"*DAMAGE INITIATION has argument DEPENDENCIES. " + f"*DAMAGE INITIATION is not processed for material {material}" + ) + return '' + + if parameters["CRITERION"] != "DUCTILE": + self._logger.warning( + f"*DAMAGE INITIATION has argument Criteron-{parameters['CRITERION']}. " + f"*DAMAGE INITIATION is not processed for material {material}" + ) + return '' + if parameters["CRITERION"] == "DUCTILE": + f_strain = [None] + t_stress = [None] + if "Equivalent fracture strain at damage initiation" in data: + f_strain = data["Equivalent fracture strain at damage initiation"] + if "Stress triaxiality" in data: + t_stress = data["Stress triaxiality"] + strain_rate = [None] * len(f_strain) + temperature = [None] * len(f_strain) + if "Strain rate" in data: + strain_rate = data["Strain rate"] + if "Temperature" in data: + temperature = data["Temperature"] + + if len(temperature) != len(f_strain) or len(strain_rate) != len(f_strain): + self._logger.warning( + f"Inconsistent temperature/strain_rate data on *DAMAGE INITIATION." 
+ f"*DAMAGE INITIATION is not processed for material {material}" + ) + return '' + + damage_init_data += f"TB, CDM, {mat_id},,,DUCT\n" + for fs, ts, sr, temp in zip(f_strain, t_stress, strain_rate, temperature): + if temp is not None: + damage_init_data += f"TBTEMP,{temp}\n" + damage_init_data += f"TBPT, DEFI, {ts}, {fs}\n" + damage_init_data += "\n" + return damage_init_data + + def _process_damage_evolution_data(self, property_dict, material, mat_id): + + damage_evol_data = '' + + if self._material_linked_to_zone_type[material] == 'Cohesive': + return '' + + data = [] + parameters = [] + + if 'Parameters' in property_dict and property_dict['Parameters'] is not None: + parameters = property_dict['Parameters'] + if 'Data' in property_dict and property_dict['Data'] is not None: + data = property_dict['Data'] + + if 'DEPENDENCIES' in parameters: + self._logger.warning( + f"*DAMAGE EVOLUTION has argument DEPENDENCIES. " + f"*DAMAGE EVOLUTION is not processed for material {material}" + ) + return '' + if parameters['TYPE'] != "DISPLACEMENT": + self._logger.warning( + f"*DAMAGE EVOLUTION has argument TYPE - {parameters['TYPE']}. " + f"*DAMAGE EVOLUTION is not processed for material {material}" + ) + return '' + if "SOFTENING" in parameters and parameters['SOFTENING'] != "LINEAR": + self._logger.warning( + f"*DAMAGE EVOLUTION has argument SOFTENING - {parameters['SOFTENING']}. " + f"*DAMAGE EVOLUTION is not processed for material {material}" + ) + return '' + if "DEGRADATION" in parameters and parameters['DEGRADATION'] != "MAXIMUM": + self._logger.warning( + f"*DAMAGE EVOLUTION has argument DEGRADATION - {parameters['DEGRADATION']}. " + f"*DAMAGE EVOLUTION is not processed for material {material}" + ) + return '' + if "MIXED MODE BEHAVIOR" in parameters: + self._logger.warning( + f"*DAMAGE EVOLUTION has argument MIXED MODE BEHAVIOR. " + f"*DAMAGE EVOLUTION is not processed for material {material}" + ) + return '' + + disp_at_failure = [] + disp_str = ( + f"Effective total or plastic displacement at failure, " + f"measured from the time of damage initiation" + ) + if disp_str in data: + disp_at_failure = data[disp_str] + + temperature = [None] * len(disp_at_failure) + + if "Temperature" in data: + temperature = data['Temperature'] + + damage_evol_data += f"TB, CDM, {mat_id},,,LIND\n" + for disp, temp in zip(disp_at_failure, temperature): + if temp is not None: + damage_evol_data += f"TBTEMP,{temp}\n" + d = disp + if float(disp) == 0: + d = '1e-6' + damage_evol_data += f"TBDATA, 1, {d}\n" + damage_evol_data += "\n" + return damage_evol_data + def _process_expansion_data(self, property_dict, material, mat_id): expansion_data = '' zero = 0.0 @@ -824,8 +956,8 @@ def _process_expansion_data(self, property_dict, material, mat_id): ctes = [None] if 'Temperature' in data: temperature = data['Temperature'] - if 'a' in data: - ctes = data['a'] + if 'A' in data: + ctes = data['A'] expansion_data += f"TB, CTE, {mat_id},,,\n" for temp, cte in zip(temperature, ctes): if temp is not None: @@ -1000,14 +1132,17 @@ def _process_plastic_data(self, property_dict, material, mat_id): stresses = data['Yield stress'] # temperature = data['Temperature'] data_points = len(strains) + skip_temp = False if 'Temperature' in data: temperature = data['Temperature'] unique_temperatures = len(list(set(temperature))) data_points = len(strains) / unique_temperatures if len(stresses) != len(temperature): self._logger.warning( - f"data values on *PLASTIC are not consistent for material {material}." 
+ f"data values on *PLASTIC are not consistent for material {material}. " + f"Please check the material properties in the cdb file created" ) + skip_temp = True if len(stresses) != len(strains): self._logger.warning( f"data values on *PLASTIC are not consistent for material {material}." @@ -1015,7 +1150,7 @@ def _process_plastic_data(self, property_dict, material, mat_id): plastic_data += f"TB,PLAS,{mat_id},,{int(data_points)},MISO\n" curr_temp = None for i, strain in enumerate(strains): - if 'Temperature' in data: + if 'Temperature' in data and skip_temp != True: if curr_temp != temperature[i]: curr_temp = temperature[i] plastic_data += f"TBTEMP,{curr_temp}\n" @@ -1033,6 +1168,7 @@ def _process_hyperelastic_data(self, property_dict, material, mat_id): 'REDUCED POLYNOMIAL' not in param_keys and 'YEOH' not in param_keys and 'NEO HOOKE' not in param_keys + and 'POLYNOMIAL' not in param_keys ): self._logger.warning( f"Only parameter REDUCED POLYNOMIAL, " @@ -1110,6 +1246,95 @@ def _process_hyperelastic_data(self, property_dict, material, mat_id): ) else: pass + if 'POLYNOMIAL' in param_keys: + if 'Temperature' in data: + temperature = data['Temperature'] + temp_data_points = len(temperature) + else: + self._logger.warning( + f"temperature is not provided for HYPERELASTIC material: {material}." + ) + temp_data_points = 1 + temperature = [None] + number_of_constants = 3 + if 'C10' in data.keys(): + number_of_constants = 1 + c10 = data['C10'] + if 'D1' in data.keys(): + d1 = data['D1'] + else: + d1 = [0.0] * len(c10) + if 'C01' in data.keys(): + c01 = data['C01'] + else: + c01 = [0.0] * len(c10) + + if 'C20' in data.keys(): + number_of_constants = 2 + c20 = data['C20'] + if 'D2' in data.keys(): + d2 = data['D2'] + else: + d2 = [0.0] * len(c20) + if 'C02' in data.keys(): + c02 = data['C02'] + else: + c02 = [0.0] * len(c20) + if 'C11' in data.keys(): + c11 = data['C11'] + else: + c11 = [0.0] * len(c20) + + if 'C30' in data.keys(): + number_of_constants = 3 + c30 = data['C30'] + if 'D3' in data.keys(): + d3 = data['D3'] + else: + d3 = [0.0] * len(c30) + if 'C03' in data.keys(): + c03 = data['C03'] + else: + c03 = [0.0] * len(c30) + if 'C21' in data.keys(): + c21 = data['C21'] + else: + c21 = [0.0] * len(c30) + if 'C12' in data.keys(): + c12 = data['C12'] + else: + c12 = [0.0] * len(c30) + order_N = number_of_constants + if "N" in param_keys: + order_N = int(property_dict["Parameters"]["N"]) + if order_N > 3: + self._logger.warning( + f"HYPERELASTIC material order (N) " + f"greater than 3 is not processed.: {material}." 
+ ) + return hyperelastic_data + + hyperelastic_data += f"TB,HYPE,{mat_id},,{number_of_constants},POLY\n" + + for i in range(len(temperature)): + if temperature[i] is not None: + hyperelastic_data += f"TBTEMP, {temperature[i]}\n" + if number_of_constants == 1: + hyperelastic_data += f"TBDATA, 1, {c10[i]}, {c01[i]}, {d1[i]}\n" + elif number_of_constants == 2: + hyperelastic_data += ( + f"TBDATA, 1, {c10[i]}, {c01[i]}, {c20[i]}, {c11[i]}, {c02[i]}, {d1[i]}\n" + ) + hyperelastic_data += f"TBDATA, 7, {d2[i]}\n" + elif number_of_constants == 3: + hyperelastic_data += ( + f"TBDATA, 1, {c10[i]}, {c01[i]}, {c20[i]}, {c11[i]}, {c02[i]}, {c30[i]}\n" + ) + hyperelastic_data += ( + f"TBDATA, 7, {c21[i]}, {c12[i]}, {c03[i]}, {d1[i]}, {d2[i]}, {d3[i]}\n" + ) + else: + pass # if 'REDUCED POLYNOMIAL' in param_keys: # if 'Temperature' in data: # temperature = data['Temperature'] @@ -1608,13 +1833,21 @@ def get_boundary_comp_name_with_base_motion(self, base_motion_name): data_lines = boundary_data['Data'] if data_lines is not None: for data_line in data_lines: - comp_names.append(str(data_line['node_set'])) + comp_names.append( + get_modified_component_name( + str(data_line['node_set']), 'NSET', self._simulation_data + ) + ) else: if base_motion_name == "": data_lines = boundary_data['Data'] if data_lines is not None: for data_line in data_lines: - comp_names.append(str(data_line['node_set'])) + comp_names.append( + get_modified_component_name( + str(data_line['node_set']), 'NSET', self._simulation_data + ) + ) return comp_names @@ -1656,9 +1889,12 @@ def _get_commands(self, boundary_data): else: last = int(data_line['last_degree']) + 1 if amplitude is not None: - # modified_amplitude_name = f"{amplitude}_{data_line['node_set']}" + # modified_amplitude_name = f"{amplitude}_{get_modified_component_name( + # data_line['node_set'])}" modified_amplitude_name = "AMPL_BOUNDARY" - applied_on = str(data_line['node_set']) + applied_on = get_modified_component_name( + str(data_line['node_set']), 'NSET', self._simulation_data + ) ampl_processor = _AmplitudeProcessor( self._model, self._simulation_data["Amplitude"] ) @@ -1673,7 +1909,13 @@ def _get_commands(self, boundary_data): self._ampl_commands += ampl_commands # ampl_processor.write_amplitude_table_to_file(ampl_commands) for i in range(first, last): - boundary_commands += f"D, {data_line['node_set']}, {dof_map[i]}, " + if data_line['node_set'].isnumeric(): + boundary_commands += f"D, {data_line['node_set']}, {dof_map[i]}, " + else: + cmname = get_modified_component_name( + data_line['node_set'], 'NSET', self._simulation_data + ) + boundary_commands += f"D, {cmname}, {dof_map[i]}, " if amplitude is not None: ac = _AmplitudeProcessor._amplitude_count boundary_commands += f"%{modified_amplitude_name}_{ac}%" @@ -1681,7 +1923,8 @@ def _get_commands(self, boundary_data): boundary_commands += f"{mag}" boundary_commands += "\n" # if i == first: - # boundary_commands += f"D, {data_line['node_set']}, {dof_map[i]}, {mag}" + # boundary_commands += ( + # f"D, {get_modified_component_name(data_line['node_set'])}, {dof_map[i]}, {mag}") # else: # additional_bcs += f', {dof_map[i]}' # boundary_commands += ', , , ' + additional_bcs + '\n' @@ -1690,9 +1933,19 @@ def _get_commands(self, boundary_data): class _DloadProcessor: - __slots__ = ('_dloads_data', '_step_start_time', '_step_end_time', '_model', '_logger') + __slots__ = ( + '_simulation_data', + '_dloads_data', + '_step_start_time', + '_step_end_time', + '_model', + '_logger', + ) - def __init__(self, model: prime.Model, data, 
step_start_time=0.0, step_end_time=1.0): + def __init__( + self, model: prime.Model, data, step_start_time=0.0, step_end_time=1.0, sim_data=None + ): + self._simulation_data = sim_data self._dloads_data = data self._step_start_time = step_start_time self._step_end_time = step_end_time @@ -1727,6 +1980,11 @@ def _get_commands(self, dload_data): continue mag = 0 load_type = None + elset = None + if 'element_number_or_set' in data_line: + elset = get_modified_component_name( + data_line['element_number_or_set'], 'ELSET', self._simulation_data + ) if 'magnitude' in data_line: mag = float(data_line['magnitude']) if 'type' in data_line: @@ -1741,9 +1999,14 @@ def _get_commands(self, dload_data): y = float(data_line['y']) if 'z' in data_line: z = float(data_line['z']) - if op == 'NEW': - dload_commands += "ACEL, 0.0, 0.0, 0.0 \n" - dload_commands += f"ACEL, {-x*mag}, {-y*mag}, {-z*mag}\n" + if elset: + if op == 'NEW': + dload_commands += f"CMACEL, {elset}, 0.0, 0.0, 0.0 \n" + dload_commands += f"CMACEL, {elset}, {-x*mag}, {-y*mag}, {-z*mag}\n" + else: + if op == 'NEW': + dload_commands += "ACEL, 0.0, 0.0, 0.0 \n" + dload_commands += f"ACEL, {-x*mag}, {-y*mag}, {-z*mag}\n" dload_commands += "\n" return dload_commands @@ -1841,9 +2104,12 @@ def _get_commands(self, cload_data): if 'magnitude' in data_line: mag = float(data_line['magnitude']) if amplitude is not None: - # modified_amplitude_name = f"{amplitude}_{data_line['node_set']}" + # modified_amplitude_name = ( + # f"{amplitude}_{get_modified_component_name(data_line['node_set'])}") modified_amplitude_name = "AMPL_CLOAD" - applied_on = str(data_line['node_set']) + applied_on = get_modified_component_name( + str(data_line['node_set']), 'NSET', self._simulation_data + ) ampl_processor = _AmplitudeProcessor( self._model, self._simulation_data["Amplitude"] ) @@ -1857,7 +2123,13 @@ def _get_commands(self, cload_data): ) self._ampl_commands += ampl_commands # ampl_processor.write_amplitude_table_to_file(ampl_commands) - cload_commands += f"F, {data_line['node_set']}, {dof_map[dof]}, " + if data_line['node_set'].isnumeric(): + cload_commands += f"F, {data_line['node_set']}, {dof_map[dof]}, " + else: + cmname = get_modified_component_name( + data_line['node_set'], 'NSET', self._simulation_data + ) + cload_commands += f"F, {cmname}, {dof_map[dof]}, " if amplitude is not None: ac = _AmplitudeProcessor._amplitude_count cload_commands += f"%{modified_amplitude_name}_{ac}%" @@ -1865,7 +2137,13 @@ def _get_commands(self, cload_data): cload_commands += f"{mag}" cload_commands += "\n" for dict_key, val in self._modal_load_vectors.items(): - if val["SET"] == data_line['node_set'] and val["COMP"] == dof_map[dof]: + if ( + get_modified_component_name(val["SET"], 'NSET', self._simulation_data) + == get_modified_component_name( + data_line['node_set'], 'NSET', self._simulation_data + ) + and val["COMP"] == dof_map[dof] + ): cload_lv_scale_commands += f"LVSCALE, 0, {dict_key}\n" self._load_vestors_in_current_step.append(dict_key) if amplitude: @@ -1958,7 +2236,7 @@ def _get_commands(self, connector_motion_data): # self._logger.info("Tabular Load condition is not processed.") amplitude = params['AMPLITUDE'] if 'LOAD CASE' in params: - self._logger.warning(f"Load Case in Connector Motion is not processed.") + self._logger.warning("Load Case in Connector Motion is not processed.") if 'TYPE' in params: connnector_motion_type = params['TYPE'] if 'FIXED' in params: @@ -1972,9 +2250,12 @@ def _get_commands(self, connector_motion_data): if 'magnitude' in data_line: mag = 
float(data_line['magnitude']) if amplitude is not None: - # modified_amplitude_name = f"{amplitude}_{data_line['node_set']}" + # modified_amplitude_name = ( + # f"{amplitude}_{get_modified_component_name(data_line['node_set'])}") modified_amplitude_name = "AMPL_CONNECTOR_MOTION" - applied_on = str(data_line['element_number_or_set']) + applied_on = get_modified_component_name( + str(data_line['element_number_or_set']), 'ELSET', self._simulation_data + ) ampl_processor = _AmplitudeProcessor( self._model, self._simulation_data["Amplitude"] ) @@ -1988,12 +2269,17 @@ def _get_commands(self, connector_motion_data): ) # ampl_processor.write_amplitude_table_to_file(ampl_commands) if data_line['element_number_or_set'].isnumeric() == False: - dls = data_line['element_number_or_set'] + dls = get_modified_component_name( + data_line['element_number_or_set'], 'ELSET', self._simulation_data + ) connector_motion_commands += f"CMSEL, S, {dls}, ELEM\n" connector_motion_commands += f"ELEM_NUM = ELNEXT(0)\n" connector_motion_commands += f"DJ, ELEM_NUM, " else: - connector_motion_commands += f"DJ, {data_line['element_number_or_set']}, " + cmname = get_modified_component_name( + str(data_line['element_number_or_set']), 'ELSET', self._simulation_data + ) + connector_motion_commands += f"DJ, {cmname}, " if connnector_motion_type == 'DISPLACEMENT': connector_motion_commands += f"{dof_map[dof]}, " elif connnector_motion_type == 'VELOCITY': @@ -2104,7 +2390,7 @@ def _get_commands(self, base_motion_data): ) return base_motion_commands if 'LOAD CASE' in params: - self._logger.warning(f"Load Case in base Motion is not processed.") + self._logger.warning("Load Case in base Motion is not processed.") if 'TYPE' in params: connnector_motion_type = params['TYPE'] if 'DOF' in params: @@ -2115,7 +2401,8 @@ def _get_commands(self, base_motion_data): base_name = params['BASE NAME'] # Processing the Base Motion if amplitude is not None: - # modified_amplitude_name = f"{amplitude}_{data_line['node_set']}" + # modified_amplitude_name = ( + # f"{amplitude}_{get_modified_component_name(data_line['node_set'])}") modified_amplitude_name = "AMPL_BASE_MOTION_" + connnector_motion_type applied_on = base_name ampl_processor = _AmplitudeProcessor(self._model, self._simulation_data["Amplitude"]) @@ -2275,6 +2562,66 @@ def _get_friction_value_for_interaction(self, interaction_name): return fric +class _MonitorProcessor: + __slots__ = ('_simulation_data', '_monitor_data', '_model', '_logger') + + def __init__(self, model: prime.Model, data, sim_data=None): + self._monitor_data = data + self._model = model + self._logger = model.python_logger + self._simulation_data = sim_data + + def get_monitor_commands(self): + monitor_commands = "" + dof_map = { + 1: 'UX', + 2: 'UY', + 3: 'UZ', + 4: 'ROTX', + 5: 'ROTY', + 6: 'ROTZ', + } + counter = 0 + for monitor_data in self._monitor_data: + params = monitor_data['Parameters'] + if params is not None: + dof = None + node = None + if 'FREQUENCY' in params: + self._logger.warning('FREQUENCY on *MONITOR is not processed.') + if 'DOF' in params: + dof = int(params['DOF']) + if 'NODE' in params: + node = params['NODE'] + if dof is None or node is None: + self._logger.warning( + f"NODE/DOF is not provided on *MONITOR. " + f"translation of *MONITOR is ignored." + ) + else: + str_dof = "" + str_node = "" + if dof in dof_map: + str_dof = dof_map[dof] + else: + self._logger.warning( + f"Invalid DOF (not from this 1-6 == UX,UY,UZ,ROTX,ROTY,ROTZ) " + f"is provided on *MONITOR. translation of *MONITOR is ignored." 
+ ) + continue + if node.isnumeric(): + str_node = node + else: + cmname = get_modified_component_name(node, 'NSET', self._simulation_data) + monitor_commands += f"CMSEL,S,{cmname},NODE\n" + monitor_commands += f"node_id = NDNEXT(0)\n" + monitor_commands += f"ALLSEL\n" + str_node = 'node_id' + counter += 1 + monitor_commands += f"MONITOR,{counter},{str_node},{str_dof}\n" + return monitor_commands + + class _GlobalDampingProcessing: __slots__ = ('_global_damping_data', '_model', '_logger') @@ -2346,9 +2693,10 @@ class _StepProcessor: '_connector_motion_ampl_commands', '_model', '_logger', + '_model_application', ) - def __init__(self, model: prime.Model, data, sim_data): + def __init__(self, model: prime.Model, data, sim_data, model_application): self._simulation_data = sim_data self._steps_data = data self._curr_step = None @@ -2371,6 +2719,7 @@ def __init__(self, model: prime.Model, data, sim_data): self._connector_motion_ampl_commands = '' self._model = model self._logger = model.python_logger + self._model_application = model_application def get_cload_ampl_commands(self): return self._cload_ampl_commands @@ -2447,6 +2796,8 @@ def get_static_analysis_data(self, static_data): # min_time_increment = time_interval_val # if max_time_increment > time_interval_val: # max_time_increment = time_interval_val + if time_increment > max_time_increment: + time_increment = max_time_increment self._step_start_time = self._time self._time += time_period self._step_end_time = self._time @@ -2455,12 +2806,18 @@ def get_static_analysis_data(self, static_data): f'STEP: {self._step_counter} -----------------------\n' ) if self._previous_analysis != "STATIC": - static_analysis_commands += f'ANTYPE, STATIC\n' + if self._model_application == prime.CdbAnalysisType.OUTERPANELSTIFFNESS: + static_analysis_commands += 'ANTYPE, TRANSIENT\n' + static_analysis_commands += 'TIMINT, ON\n' + static_analysis_commands += 'TINTP, QUASI\n' + static_analysis_commands += 'NROPT, FULL\n' + else: + static_analysis_commands += 'ANTYPE, STATIC\n' static_analysis_commands += f'TIME,{self._time}\n' static_analysis_commands += ( f'DELTIM, {time_increment}, {min_time_increment}, {max_time_increment}\n' ) - static_analysis_commands += f'\n' + static_analysis_commands += '\n' self._previous_analysis = "STATIC" return static_analysis_commands @@ -2503,6 +2860,8 @@ def get_modal_dynamic_analysis_data(self, dynamic_data): # # min_time_increment = time_interval_val # # if max_time_increment > time_interval_val: # # max_time_increment = time_interval_val + if time_increment > max_time_increment: + time_increment = max_time_increment if 'Parameters' in dynamic_data: data = dynamic_data['Parameters'] if data: @@ -2613,6 +2972,8 @@ def get_dynamic_analysis_data(self, dynamic_data): # min_time_increment = time_interval_val # if max_time_increment > time_interval_val: # max_time_increment = time_interval_val + if time_increment > max_time_increment: + time_increment = max_time_increment if 'Parameters' in dynamic_data: data = dynamic_data['Parameters'] if data: @@ -2737,6 +3098,34 @@ def get_frequency_analysis_data(self, frequency_data): ): modopt_method = 'QRDAMP' break + if "Materials" in self._simulation_data: + all_mats = self._simulation_data['Materials'] + if all_mats: + for mat in all_mats: + _mat = all_mats[mat] + if _mat and 'DAMPING' in _mat: + if 'Parameters' in _mat['DAMPING']: + if "ALPHA" in _mat['DAMPING']['Parameters']: + modopt_method = 'QRDAMP' + break + if "Mass" in self._simulation_data: + all_mass = 
self._simulation_data['Mass'] + if all_mass: + for m in all_mass: + _m = all_mass[m] + if 'Parameters' in _m: + if "ALPHA" in _m['Parameters']: + modopt_method = 'QRDAMP' + break + if "RotaryInertia" in self._simulation_data: + all_mass = self._simulation_data['RotaryInertia'] + if all_mass: + for m in all_mass: + _m = all_mass[m] + if 'Parameters' in _m: + if "ALPHA" in _m['Parameters']: + modopt_method = 'QRDAMP' + break frequency_analysis_commands += ( f'! -------------------------- STEP: {self._step_counter} -----------------------\n' ) @@ -2927,7 +3316,16 @@ def get_output_analysis_data(self, output_data): f"used in *OUTPUT commands. Only first one will be used" ) time_points = parameters['TIME POINT'] + if 'TIME POINTS' in parameters: + if time_points is not None and time_points != parameters['TIME POINTS']: + self._logger.warning( + f"Warning: There are multiple TIME POINT arrays " + f"used in *OUTPUT commands. Only first one will be used" + ) + time_points = parameters['TIME POINTS'] # above lines to be checked because of limitation on ansys OUTRES commands. + if time_points is not None: + time_points = get_modified_component_name(time_points) if len(output_data) > 0: output_analysis_commands += "\n" output_analysis_commands += "OUTRES, ERASE\n" @@ -2988,12 +3386,91 @@ def get_output_analysis_data(self, output_data): output_analysis_commands += "0\n" else: output_analysis_commands += "1\n" + # if 'Parameters' in nodeout: + # if ( + # nodeout['Parameters'] is not None + # and 'NSET' in nodeout['Parameters'] + # ): + # output_analysis_commands += get_modified_component_name( + # nodeout['Parameters']['NSET'],'NSET', self._simulation_data) + if 'EnergyOutput' in output['Data']: + for enrgout in output['Data']['EnergyOutput']: + if enrgout is not None and enrgout['Data'] is not None: + # enrgout = output['EnergyOutput'] + output_analysis_commands += "OUTRES, " + output_analysis_commands += "VENG, " + if time_points is not None: + output_analysis_commands += f'%{time_points}%, ' + else: + if ninterval: + if number_interval_to_table: + output_analysis_commands += f'%{ninterval}%, ' + else: + output_analysis_commands += f'-{ninterval}, ' + elif nfreq: + output_analysis_commands += f'{nfreq}, ' + else: + output_analysis_commands += 'ALL, ' + if 'Parameters' in enrgout: + if ( + enrgout['Parameters'] is not None + and 'ELSET' in enrgout['Parameters'] + ): + output_analysis_commands += get_modified_component_name( + enrgout['Parameters']['ELSET'], + 'ELSET', + self._simulation_data, + ) + output_analysis_commands += ', ,\n' if 'NodeOutput' in output['Data']: for nodeout in output['Data']['NodeOutput']: if nodeout is not None and nodeout['Data'] is not None: # nodeout = output['NodeOutput'] for key in nodeout['Data']['keys']: - if key in ["RF", "U", "UT", "U1", "U2", "U3", "A1", "A2", "A3"]: + if key in [ + "RF", + "U", + "UT", + "U1", + "U2", + "U3", + "A1", + "A2", + "A3", + "CF", + ]: + if key == "CF": + if 'Parameters' in nodeout: + if ( + nodeout['Parameters'] is not None + and 'NSET' in nodeout['Parameters'] + ): + output_analysis_commands += ( + "CMSEL,S," + + get_modified_component_name( + nodeout['Parameters']['NSET'], + 'NSET', + self._simulation_data, + ) + + ",NODE\n" + ) + output_analysis_commands += ( + "NSLE\nESEL,R,ELEM,,174\n" + ) + output_analysis_commands += ( + "CM," + + get_modified_component_name( + nodeout['Parameters']['NSET'], + 'NSET', + self._simulation_data, + ) + + "_CONTACT, ELEM\n" + ) + output_analysis_commands += "ALLSEL\n" + else: + output_analysis_commands 
+= "ESEL,S,ELEM,,174\n" + output_analysis_commands += "CM,All_CONTACT,ELEM\n" + output_analysis_commands += "ALLSEL\n" output_analysis_commands += "OUTRES, " if key == "RF": output_analysis_commands += "RSOL, " @@ -3028,9 +3505,42 @@ def get_output_analysis_data(self, output_data): nodeout['Parameters'] is not None and 'NSET' in nodeout['Parameters'] ): - output_analysis_commands += nodeout['Parameters'][ - 'NSET' - ] + output_analysis_commands += ( + get_modified_component_name( + nodeout['Parameters']['NSET'], + 'NSET', + self._simulation_data, + ) + ) + elif key == "CF": + output_analysis_commands += "MISC, " + if time_points is not None: + output_analysis_commands += f'%{time_points}%, ' + else: + if ninterval: + if number_interval_to_table: + output_analysis_commands += f'%{ninterval}%, ' + else: + output_analysis_commands += f'-{ninterval}, ' + elif nfreq: + output_analysis_commands += f'{nfreq}, ' + else: + output_analysis_commands += 'ALL, ' + if 'Parameters' in nodeout: + if ( + nodeout['Parameters'] is not None + and 'NSET' in nodeout['Parameters'] + ): + output_analysis_commands += ( + get_modified_component_name( + nodeout['Parameters']['NSET'], + 'NSET', + self._simulation_data, + ) + + "_CONTACT" + ) + else: + output_analysis_commands += "All_CONTACT" output_analysis_commands += ', ,\n' else: self._logger.warning( @@ -3041,9 +3551,20 @@ def get_output_analysis_data(self, output_data): if elemout is not None and elemout['Data'] is not None: # elemout = output['ElementOutput'] for key in elemout['Data']['keys']: - if key in ["S", "CTF", "NFORC"]: + if key in [ + "S", + "CTF", + "NFORC", + "LE", + "PEEQ", + "SINV", + "PE", + "MISESMAX", + "PEEQ", + "PEEQMAX", + ]: output_analysis_commands += "OUTRES, " - if key == "S": + if key in ["S", "SINV", "MISESMAX"]: output_analysis_commands += "STRS, " if time_points is not None: output_analysis_commands += f'%{time_points}%, ' @@ -3062,9 +3583,96 @@ def get_output_analysis_data(self, output_data): elemout['Parameters'] is not None and 'ELSET' in elemout['Parameters'] ): - output_analysis_commands += elemout['Parameters'][ - 'ELSET' - ] + output_analysis_commands += ( + get_modified_component_name( + elemout['Parameters']['ELSET'], + 'ELSET', + self._simulation_data, + ) + ) + elif key in ["PEEQ", "PEEQMAX"]: + output_analysis_commands += "NLDAT, " + if time_points is not None: + output_analysis_commands += f'%{time_points}%, ' + else: + if ninterval: + if number_interval_to_table: + output_analysis_commands += f'%{ninterval}%, ' + else: + output_analysis_commands += f'-{ninterval}, ' + elif nfreq: + output_analysis_commands += f'{nfreq}, ' + else: + output_analysis_commands += 'ALL, ' + if 'Parameters' in elemout: + if ( + elemout['Parameters'] is not None + and 'ELSET' in elemout['Parameters'] + ): + output_analysis_commands += ( + get_modified_component_name( + elemout['Parameters']['ELSET'], + 'ELSET', + self._simulation_data, + ) + ) + elif key == "PE": + out_cmds = "" + out_cmds += "EPPL, " + if time_points is not None: + out_cmds += f'%{time_points}%, ' + else: + if ninterval: + if number_interval_to_table: + out_cmds += f'%{ninterval}%, ' + else: + out_cmds += f'-{ninterval}, ' + elif nfreq: + out_cmds += f'{nfreq}, ' + else: + out_cmds += 'ALL, ' + if 'Parameters' in elemout: + if ( + elemout['Parameters'] is not None + and 'ELSET' in elemout['Parameters'] + ): + out_cmds += get_modified_component_name( + elemout['Parameters']['ELSET'], + 'ELSET', + self._simulation_data, + ) + output_analysis_commands += ( + out_cmds + + ', 
,\n' + + "OUTRES, " + + out_cmds.replace("EPPL", "NLDAT") + ) + elif key == "LE": + output_analysis_commands += "EPEL, " + if time_points is not None: + output_analysis_commands += f'%{time_points}%, ' + else: + if ninterval: + if number_interval_to_table: + output_analysis_commands += f'%{ninterval}%, ' + else: + output_analysis_commands += f'-{ninterval}, ' + elif nfreq: + output_analysis_commands += f'{nfreq}, ' + else: + output_analysis_commands += 'ALL, ' + if 'Parameters' in elemout: + if ( + elemout['Parameters'] is not None + and 'ELSET' in elemout['Parameters'] + ): + output_analysis_commands += ( + get_modified_component_name( + elemout['Parameters']['ELSET'], + 'ELSET', + self._simulation_data, + ) + ) elif key == "CTF": output_analysis_commands += "MISC, " if time_points is not None: @@ -3084,9 +3692,13 @@ def get_output_analysis_data(self, output_data): elemout['Parameters'] is not None and 'ELSET' in elemout['Parameters'] ): - output_analysis_commands += elemout['Parameters'][ - 'ELSET' - ] + output_analysis_commands += ( + get_modified_component_name( + elemout['Parameters']['ELSET'], + 'ELSET', + self._simulation_data, + ) + ) elif key == "NFORC": output_analysis_commands += "NLOAD, " if time_points is not None: @@ -3106,9 +3718,13 @@ def get_output_analysis_data(self, output_data): elemout['Parameters'] is not None and 'ELSET' in elemout['Parameters'] ): - output_analysis_commands += elemout['Parameters'][ - 'ELSET' - ] + output_analysis_commands += ( + get_modified_component_name( + elemout['Parameters']['ELSET'], + 'ELSET', + self._simulation_data, + ) + ) output_analysis_commands += ', ,\n' else: self._logger.warning( @@ -3124,7 +3740,9 @@ def get_output_analysis_data(self, output_data): elemout['Parameters'] is not None and 'ELSET' in elemout['Parameters'] ): - output_analysis_commands += elemout['Parameters']['ELSET'] + output_analysis_commands += get_modified_component_name( + elemout['Parameters']['ELSET'], 'ELSET', self._simulation_data + ) output_analysis_commands += "\n" return output_analysis_commands @@ -3181,10 +3799,18 @@ def create_modal_vectors(self): vector_commands += f'SFEDELE, ALL, ALL, ALL \n' vector_commands += f'ACEL, 0, 0, 0 \n' vector_commands += f'\n' - vector_commands += f"F, {data_line['node_set']}, {dof_map[dof]}, 1\n" + if data_line['node_set'].isnumeric(): + vector_commands += ( + f"F, " f"{data_line['node_set']}, " f"{dof_map[dof]}, 1\n" + ) + else: + cmname = get_modified_component_name( + data_line['node_set'], 'NSET', self._simulation_data + ) + vector_commands += f"F, " f"{cmname}, " f"{dof_map[dof]}, 1\n" count_load_vectors += 1 self._modal_load_vectors[count_load_vectors] = { - 'SET': data_line['node_set'], + 'SET': cmname, "COMP": dof_map[dof], } if "BaseMotion" in step_data: @@ -3195,7 +3821,9 @@ def create_modal_vectors(self): continue base_name = "" if 'BASE NAME' in params: - base_name = params['BASE NAME'] + base_name = get_modified_component_name( + params['BASE NAME'], 'NSET', self._simulation_data + ) boundaries_for_motion = self.get_step_boundary_component_data( self._curr_step["Boundary"], base_name ) @@ -3280,7 +3908,11 @@ def get_step_cload_data(self, cloads_data): def get_step_dload_data(self, dloads_data): dload_processor = _DloadProcessor( - self._model, dloads_data, self._step_start_time, self._step_end_time + self._model, + dloads_data, + self._step_start_time, + self._step_end_time, + sim_data=self._simulation_data, ) # TODO this needs to be in List of cloads instead of single Cload dload_commands = '' @@ -3321,6 
+3953,11 @@ def get_global_damping_commnads(self, global_damping_data): ) return global_damping_commands + def get_monitor_commands(self, monitor_data): + monitor_processor = _MonitorProcessor(self._model, monitor_data, self._simulation_data) + monitor_commands = monitor_processor.get_monitor_commands() + return monitor_commands + def _get_current_step_analysis_type(self, step_data): mapdl_step_commands = '/solu\n' # processed_analysis_type = ["Static", "FREQUENCY", @@ -3356,6 +3993,7 @@ def _process_step(self, step_data): 'Frequency': self.get_frequency_analysis_data, 'SteadyStateDynamics': self.get_steady_state_dynamics_data, 'GlobalDamping': self.get_global_damping_commnads, + 'Monitor': self.get_monitor_commands, } keys = [ 'Static', @@ -3369,6 +4007,7 @@ def _process_step(self, step_data): 'ConnectorMotion', 'BaseMotion', 'Dload', + 'Monitor', 'Output', ] mapdl_step_commands = '' @@ -3392,9 +4031,8 @@ def _process_step(self, step_data): else: mapdl_step_commands += "OFF\n" if 'UNSYMM' in step_params: - mapdl_step_commands += "NROPT, " if step_params['UNSYMM'] == "YES": - mapdl_step_commands += "UNSYM\n" + mapdl_step_commands += "NROPT, UNSYM\n" mapdl_step_commands += 'RESCONTROL,,NONE,NONE\n' mapdl_step_commands += '\n' mapdl_step_commands += 'DMPOPT, RST, YES \n' @@ -3517,6 +4155,51 @@ def _modify_section_data(self, behavior_data): return secdata_string +def get_modified_component_name(name: str, set_type: str = None, sim_data=None) -> str: + """ + Modify a component name to meet specific criteria. + + This function replaces any non-alphanumeric characters with underscores + and adds the prefix "COMP_" if the name starts with a digit or underscore. + + Parameters + ---------- + name : str + The original component name. + + set_type : str + The type of component (NSET, ELSET, SURFACE), used if there is a potential name conflict. + + sim_data + The simulation data of the part in json format, used if there is a potential name conflict. + + Returns + ------- + str + The modified component name. + + Notes + ----- + This function is designed to sanitize component names for specific use cases + where restrictions might exist on allowed characters and initial characters. 
+ """ + modified_name = re.sub(r"[^\w]", "_", name) + + if set_type == 'NSET': + has_surface = 'Surface' in sim_data and name in sim_data['Surface'] + has_nset = 'Nset' in sim_data and name in sim_data['Nset'] + has_elset = 'Elset' in sim_data and name in sim_data['Elset'] + + count = has_surface + has_nset + has_elset + + if count > 1: + modified_name = set_type + "_" + name + + if modified_name and (modified_name[0].isdigit() or modified_name[0] == "_"): + modified_name = "COMP_" + modified_name + return modified_name + + def generate_mapdl_commands( model: prime.Model, simulation_data: str, params: ExportMapdlCdbParams ) -> Tuple[str, str]: @@ -3593,7 +4276,10 @@ def generate_mapdl_commands( base_motion_ampl_commands = '' if "Step" in json_simulation_data: steps_data = _StepProcessor( - model, json_simulation_data["Step"], sim_data=json_simulation_data + model, + json_simulation_data["Step"], + sim_data=json_simulation_data, + model_application=params.analysis_type, ) step_settings = steps_data.get_all_steps() ninterval_mapdl_commands = steps_data.get_ninterval_mapdl_commands() @@ -3633,6 +4319,7 @@ def generate_mapdl_commands( # analysis_settings += '!cnch,conv\n' # analysis_settings += '!-------------------------------------------------\n' # analysis_settings += '\n' + analysis_settings += '\nALLSEL\n' if general_contact_cmds: analysis_settings += general_contact_cmds analysis_settings += '!-------------------------------------------------\n' diff --git a/src/ansys/meshing/prime/core/model.py b/src/ansys/meshing/prime/core/model.py index 081ae1456f..8adf40f0a8 100644 --- a/src/ansys/meshing/prime/core/model.py +++ b/src/ansys/meshing/prime/core/model.py @@ -27,6 +27,8 @@ from ansys.meshing.prime.autogen.model import Model as _Model # isort: split +import os + import ansys.meshing.prime.internals.json_utils as json from ansys.meshing.prime.autogen.commonstructs import DeleteResults from ansys.meshing.prime.autogen.materialpointmanager import MaterialPointManager @@ -97,10 +99,15 @@ def _sync_up_model(self): sc_data = res["SizeControl"] pc_data = res["PrismControl"] wc_data = res["WrapperControl"] - mc_data = res["WrapperControl"] + mc_data = res["MultiZoneControl"] vc_data = res["VolumeControl"] + + if "ThinVolumeControl" in res: + tvc_data = res["ThinVolumeControl"] + if "PeriodicControl" in res: percon_data = res["PeriodicControl"] + sf_params = res["GlobalSizingParams"] self._global_sf_params = GlobalSizingParams( @@ -113,6 +120,7 @@ def _sync_up_model(self): self._control_data._update_wrapper_controls(wc_data) self._control_data._update_multi_zone_controls(mc_data) self._control_data._update_volume_controls(vc_data) + self._control_data._update_thin_volume_controls(tvc_data) self._topo_data = TopoData(self, -1, res["TopoData"], "") if "PeriodicControl" in res: self._control_data._update_periodic_controls(percon_data) @@ -287,6 +295,29 @@ def set_global_sizing_params(self, params: GlobalSizingParams): _Model.set_global_sizing_params(self, params) self._global_sf_params = params + def set_working_directory(self, path: str): + """Set working directory. + + Set the working directory to be considered for file i/o when the file paths are relative. + + Parameters + ---------- + path : str + Path to the directory. + + Notes + ----- + **This is a beta API**. **The behavior and implementation may change in future**. 
+ + Examples + -------- + >>> model = prime.local_model + >>> zones = model.set_working_directory("C:/input_files") + + """ + _Model.set_working_directory(self, path) + os.chdir(path) + def __str__(self): """Print the summary of the model. diff --git a/src/ansys/meshing/prime/internals/communicator.py b/src/ansys/meshing/prime/internals/communicator.py index 02a3aab458..958717423b 100644 --- a/src/ansys/meshing/prime/internals/communicator.py +++ b/src/ansys/meshing/prime/internals/communicator.py @@ -81,6 +81,29 @@ def run_on_server(self, model, recipe: str) -> dict: """ pass + @abstractmethod + def server_command(self, command: str, *args) -> dict: + """Run commands on the server. + + Parameters + ---------- + command : str + Commands to run. + + Returns + ------- + dict + Result from the server side. + + Raises + ------ + RuntimeError + Bad response from server. + RuntimeError + Can not connect to server. + """ + pass + @abstractmethod def import_cad(self, model, file_name: str, *args) -> dict: """Import a CAD file from local. diff --git a/src/ansys/meshing/prime/internals/error_handling.py b/src/ansys/meshing/prime/internals/error_handling.py index dec0a8ba61..8ee7f361a0 100644 --- a/src/ansys/meshing/prime/internals/error_handling.py +++ b/src/ansys/meshing/prime/internals/error_handling.py @@ -46,6 +46,7 @@ ErrorCode.SURFERLAYEREDQUADFAILED: "Layered quad meshing failed.", ErrorCode.SURFERINVALIDINPUT: "Surface meshing invalid input.", ErrorCode.SURFERNONMANIFOLDEDGE: "Surface meshing non manifold edge.", + ErrorCode.SURFEROPENINITIALFRONTLOOP: "Surface meshing open initial front loop.", ErrorCode.LOCALSURFERINVALIDNUMRINGS: "Invalid number of rings input for the local surface mesh operation.", ErrorCode.SURFERQUADFAILED: " Quad meshing failed for surface meshing.", ErrorCode.REMESHFACEZONELETSNOTSUPPORTEDFORTOPOLOGYPART: "Remesh face zonelets is not supported for part with topology data. Try mesh_topo_faces in surfer.", @@ -196,6 +197,7 @@ ErrorCode.NOINPUT: "No input provided.", ErrorCode.DELETEPARTSFAILED: "Delete parts failed.", ErrorCode.DELETECONTROLSFAILED: "Delete controls failed.", + ErrorCode.OCTREELIMITREACHED: "Limit reached for the number of octants supported. Use a coarser sizing and try again.", ErrorCode.WRAPPERGLOBALSETTINGSNOTSET: "Wrapper global settings are not set.", ErrorCode.WRAPPERRESOLVEINTERSECTIONFAILED: "Wrapper resolve intersection step failed.", ErrorCode.WRAPPERCONNECTFAILED: "Wrapper connection generic failure.", diff --git a/src/ansys/meshing/prime/internals/grpc_communicator.py b/src/ansys/meshing/prime/internals/grpc_communicator.py index 0ab1ee4ca7..e40b11621e 100644 --- a/src/ansys/meshing/prime/internals/grpc_communicator.py +++ b/src/ansys/meshing/prime/internals/grpc_communicator.py @@ -279,6 +279,47 @@ def run_on_server(self, model: Model, recipe: str) -> dict: else: raise RuntimeError("No connection with server") + def server_command(self, command: str, *args) -> dict: + """Run commands on the server. + + Parameters + ---------- + command : str + Commands to run. + + Returns + ------- + dict + Result from the server side. + + Raises + ------ + RuntimeError + Bad response from server. + RuntimeError + Can not connect to server. 
+ """ + if self._stub is not None: + command = {"Command": command} + if len(args) > 0: + command.update({"Args": args[0]}) + + response = self._stub.ServerCommand( + request_iterator( + 0, + json.dumps(command), + prime_pb2.StringMessage, + prime_pb2.Model, + prime_pb2.StringJsonContent, + prime_pb2.MessageCompletionToken, + ) + ) + message = get_response(response, '') + return message + else: + raise RuntimeError("No connection with server") + return {} + def close(self): """Close opened channels.""" self._stub = None diff --git a/src/ansys/meshing/prime/internals/launcher.py b/src/ansys/meshing/prime/internals/launcher.py index 9dd06b950a..1e3ab00d2f 100644 --- a/src/ansys/meshing/prime/internals/launcher.py +++ b/src/ansys/meshing/prime/internals/launcher.py @@ -44,7 +44,7 @@ def get_install_locations(): - supported_versions = ['242'] + supported_versions = ['251'] awp_roots = {ver: os.environ.get(f'AWP_ROOT{ver}', '') for ver in supported_versions} installed_versions = { ver: os.path.join(path, 'meshing', 'Prime') diff --git a/src/ansys/meshing/prime/internals/prime_communicator.py b/src/ansys/meshing/prime/internals/prime_communicator.py index 25cb1972a4..7562f804d5 100644 --- a/src/ansys/meshing/prime/internals/prime_communicator.py +++ b/src/ansys/meshing/prime/internals/prime_communicator.py @@ -124,6 +124,28 @@ def run_on_server(self, model, recipe: str) -> dict: result = json.loads(output) return result + def server_command(self, command: str, *args) -> dict: + """Run commands on the server. + + Parameters + ---------- + command : str + Commands to run. + + Returns + ------- + dict + Result from the server side. + + Raises + ------ + RuntimeError + Bad response from server. + RuntimeError + Can not connect to server. + """ + return {} + def close(self): """Close session.""" Prime.Finalize()