Include ADO changes #973

Merged: 11 commits, Feb 3, 2025
2 changes: 1 addition & 1 deletion .github/workflows/ci_cd.yml
@@ -15,7 +15,7 @@ on:

env:
DOCKER_IMAGE_NAME: ghcr.io/ansys/prime
DOCKER_IMAGE_TAG: '25.1.0'
DOCKER_IMAGE_TAG: '25.1.1.dev0'
MAIN_PYTHON_VERSION: '3.12'
PACKAGE_NAME: 'ansys-meshing-prime'
PACKAGE_NAMESPACE: 'ansys.meshing.prime'
1 change: 1 addition & 0 deletions doc/changelog.d/973.maintenance.md
@@ -0,0 +1 @@
Include ADO changes
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "flit_core.buildapi"

[project]
name = "ansys-meshing-prime"
version = "0.7.0"
version = "0.8.0.dev0"
description = "PyPrimeMesh is a Python client to Ansys Prime Server, which delivers core Ansys meshing technology."
readme = "README.md"
requires-python = ">=3.10,<4"
212 changes: 171 additions & 41 deletions src/ansys/meshing/prime/autogen/fileiostructs.py

Large diffs are not rendered by default.
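Judging from the call sites added in fileio.py and mapdlcdbexportutils.py below, the relevant additions in this autogenerated file are a `SeparateBlocksFormatType` enum plus new `write_separate_blocks` and `separate_blocks_format_type` fields on `ExportMapdlCdbParams`. The sketch below is an assumption about their shape, not the generated code itself:

```python
# Hypothetical sketch only, inferred from how fileio.py consumes these names;
# the real autogenerated definitions and enum values may differ.
import enum


class SeparateBlocksFormatType(enum.IntEnum):
    """Format used when writing separate element blocks to a CDB file."""

    STANDARD = 0  # assumed value
    COMPACT = 1   # assumed value


class ExportMapdlCdbParams:
    """Subset of the CDB export parameters touched by this PR (sketch)."""

    def __init__(self):
        self.write_separate_blocks = False
        self.separate_blocks_format_type = SeparateBlocksFormatType.STANDARD
        self.export_fasteners_as_swgen = False
        self.export_rigid_bodies_as_rbgen = False
```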

12 changes: 12 additions & 0 deletions src/ansys/meshing/prime/autogen/primeconfig.py
@@ -801,6 +801,18 @@ class ErrorCode(enum.IntEnum):
AUTOQUADMESHER_INVALIDMINMAXSIZES = 15001
"""Difference in maximum value and minimum value is negative.

**This is a beta parameter**. **The behavior and name may change in the future**."""
IMPORTABAQUSFAILEDWITHUNKNOWNERROR = 16200
"""Import Abaqus failed. Failed with unknown error.

**This is a beta parameter**. **The behavior and name may change in the future**."""
IMPORTABAQUSFAILEDWITHPARSINGFAILURE = 16201
"""Import Abaqus failed. Failed to parse file.

**This is a beta parameter**. **The behavior and name may change in the future**."""
IMPORTABAQUSFAILEDDURINGMESHCREATION = 16202
"""Import Abaqus failed. Failed to create mesh entities.

**This is a beta parameter**. **The behavior and name may change in the future**."""
ZEROELEMENTSREADFROMCDBFILE = 16500
"""No elements read from CDB file.
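The three new codes extend the Abaqus import path alongside the existing CDB import codes; their user-facing messages are added to error_handling.py further down. A minimal sketch of branching on them is shown below; the import path for `ErrorCode` and the idea that a failed import surfaces one of these codes are assumptions, only the enum member names come from this diff.

```python
# Sketch: map the new Abaqus import error codes to short descriptions.
# Assumes ErrorCode is re-exported from the package root.
from ansys.meshing.prime import ErrorCode

ABAQUS_IMPORT_ERRORS = {
    ErrorCode.IMPORTABAQUSFAILEDWITHUNKNOWNERROR: "unknown failure during import",
    ErrorCode.IMPORTABAQUSFAILEDWITHPARSINGFAILURE: "could not parse the input file",
    ErrorCode.IMPORTABAQUSFAILEDDURINGMESHCREATION: "parsed, but mesh creation failed",
}


def describe_abaqus_failure(code: ErrorCode) -> str:
    """Return a short description for the new Abaqus import error codes."""
    return ABAQUS_IMPORT_ERRORS.get(code, "not an Abaqus import error")
```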
21 changes: 14 additions & 7 deletions src/ansys/meshing/prime/core/fileio.py
@@ -54,6 +54,7 @@
ImportMapdlCdbParams,
ImportMapdlCdbResults,
ReadSizeFieldParams,
SeparateBlocksFormatType,
SizeFieldFileReadResults,
WriteSizeFieldParams,
)
@@ -342,7 +343,7 @@ def initialize_cdb_export_params(
"""
Initialize specific CDB export parameters based on the given version.

This function sets the use_compact_format, export_fasteners_as_swgen and
This function sets the separate_blocks_format_type, export_fasteners_as_swgen and
export_rigid_bodies_as_rbgen parameters of the provided ExportMapdlCdbParams
object based on the given major and minor version numbers.
Other parameters remain unchanged.
@@ -366,16 +367,16 @@
**This is a beta API**. **The behavior and implementation may change in future**.

The version is formed as "<major_version>r<minor_version>", e.g., "24r1", "25r2".
If the version is greater than or equal to "25r1", the use_compact_format,
export_fasteners_as_swgen and export_rigid_bodies_as_rbgen parameters are set
to True. Otherwise, they are set to False.
If the version is greater than or equal to "25r1", write_separate_blocks is set to True
with COMPACT format, and export_fasteners_as_swgen and export_rigid_bodies_as_rbgen
parameters are set to True. Otherwise, they are set to False.

Examples
--------
>>> file_io = prime.FileIO(model=model)
>>> params = prime.ExportMapdlCdbParams()
>>> params = file_io.initialize_cdb_export_params(params, 24, 1)
>>> params.use_compact_format
>>> params.write_separate_blocks
False
>>> params.export_fasteners_as_swgen
False
@@ -385,15 +386,21 @@
>>> file_io = prime.FileIO(model=model)
>>> params = prime.ExportMapdlCdbParams()
>>> params = file_io.initialize_cdb_export_params(params, 25, 2)
>>> params.use_compact_format
>>> params.write_separate_blocks
True
>>> params.separate_blocks_format_type
SeparateBlocksFormatType.COMPACT
>>> params.export_fasteners_as_swgen
True
>>> params.export_rigid_bodies_as_rbgen
True
"""
version = f"{major_version}r{minor_version}"
params.use_compact_format = version >= "25r1"
params.write_separate_blocks = version >= "25r1"
if version >= "25r1":
params.separate_blocks_format_type = SeparateBlocksFormatType.COMPACT
else:
params.separate_blocks_format_type = SeparateBlocksFormatType.STANDARD
params.export_fasteners_as_swgen = version >= "25r1"
params.export_rigid_bodies_as_rbgen = version >= "25r1"
return params
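For quick reference, here is the version-to-settings mapping that `initialize_cdb_export_params` now applies, written as a standalone sketch. It mirrors the diff above rather than calling the library; the `SeparateBlocksFormatType` import path is assumed.

```python
from ansys.meshing.prime import SeparateBlocksFormatType  # import path assumed


def cdb_block_settings(major_version: int, minor_version: int):
    """Mirror the logic of initialize_cdb_export_params (sketch only)."""
    version = f"{major_version}r{minor_version}"
    # Lexicographic string comparison, exactly as in the diff above.
    write_separate_blocks = version >= "25r1"
    fmt = (
        SeparateBlocksFormatType.COMPACT
        if write_separate_blocks
        else SeparateBlocksFormatType.STANDARD
    )
    return write_separate_blocks, fmt


# cdb_block_settings(24, 1) -> (False, SeparateBlocksFormatType.STANDARD)
# cdb_block_settings(25, 2) -> (True, SeparateBlocksFormatType.COMPACT)
```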
92 changes: 65 additions & 27 deletions src/ansys/meshing/prime/core/mapdlcdbexportutils.py
@@ -658,6 +658,7 @@ class _MaterialProcessor:
__slots__ = (
'_raw_materials_data',
'_zone_data',
'_enable_hm_comments',
'_mat_id',
'_material_linked_to_zone_type',
'_cohezive_zone_thickness_data',
@@ -666,10 +667,11 @@
'_logger',
)

def __init__(self, model: prime.Model, raw_materials_data, zone_data):
def __init__(self, model: prime.Model, raw_materials_data, zone_data, hm_comments=False):
self._raw_materials_data = raw_materials_data
self._zone_data = zone_data
self._mat_id = 0
self._enable_hm_comments = hm_comments
self._material_linked_to_zone_type = {}
self._cohezive_zone_thickness_data = {}
self._property_function_map = {
@@ -762,7 +764,13 @@ def _get_mat_comands(self, material):
'DAMAGE EVOLUTION',
]
# self._logger.info(mat_data)
mapdl_text_data = f"! material '{material}' \n"
mapdl_text_data = ""
hm_comment = self._enable_hm_comments
if hm_comment:
mapdl_text_data += "!!HMNAME MAT \n"
mapdl_text_data += f'!!{self._mat_id:>10} "{material}"\n'
else:
mapdl_text_data += f"! material '{material}' \n"
if "Parameters" in mat_data:
self._logger.warning(f"Parameter on Material {material} are not processed.")
for prop in mat_data:
@@ -1030,7 +1038,10 @@ def _process_elastic_modulus(self, property_dict, material, mat_id):
data = []
if 'Data' in property_dict and property_dict['Data'] is not None:
data = property_dict['Data']
if property_dict["Parameters"]["TYPE"] == "ISOTROPIC":
if (
property_dict["Parameters"]["TYPE"] == "ISOTROPIC"
or property_dict["Parameters"]["TYPE"] == "ISO"
):
# self._logger.warning(f"Only isotropic elastic modulus is processed, "
# f"Elastic Modulus for the material {material} "
# f"is not processed.")
@@ -1056,15 +1067,19 @@
f"for material {material}"
)
if self._material_linked_to_zone_type[material] == 'Cohesive':
elastic_modulus += f"TB, GASKET, {mat_id}, 1, 2,elas\n"
elastic_modulus += f"TB, ELAS, {mat_id}, 1, 2,ISOT\n"
elastic_modulus += f"TBDATA, 1, {youngs_mod[0]}, {nu[0]}\n"
# elastic_modulus += f"TB, GASKET, {mat_id}, 1, 2,elas\n"
# elastic_modulus += f"TBDATA, 1, {youngs_mod[0]}, {nu[0]}\n"
else:
elastic_modulus += f"MP,EX,{mat_id},{youngs_mod[0]}\n"
elastic_modulus += f"MP,NUXY,{mat_id},{nu[0]}\n"
else:
if self._material_linked_to_zone_type[material] == 'Cohesive':
elastic_modulus += f"TB, GASKET, {mat_id}, 1, 2,elas\n"
elastic_modulus += f"TB, ELAS, {mat_id}, 1, 2,ISOT\n"
elastic_modulus += f"TBDATA, 1, {youngs_mod[0]}, {nu[0]}\n"
# elastic_modulus += f"TB, GASKET, {mat_id}, 1, 2,elas\n"
# elastic_modulus += f"TBDATA, 1, {youngs_mod[0]}, {nu[0]}\n"
else:
elastic_modulus += f"MP,EX,{mat_id},{youngs_mod[0]}\n"
elastic_modulus += f"MP,NUXY,{mat_id},{nu[0]}\n"
@@ -1356,14 +1371,16 @@ class _JointMaterialProcessor:
__slots__ = (
'_raw_joint_materials_data',
'_mat_id',
'_enable_hm_comments',
'_property_function_map',
'_model',
'_logger',
)

def __init__(self, model: prime.Model, raw_joint_materials_data):
def __init__(self, model: prime.Model, raw_joint_materials_data, hm_comments=False):
self._raw_joint_materials_data = raw_joint_materials_data
self._mat_id = 0
self._enable_hm_comments = hm_comments
self._property_function_map = {
'CONNECTOR ELASTICITY': self._process_elasticity,
'CONNECTOR DAMPING': self._process_damping,
@@ -1385,7 +1402,13 @@ def _get_mat_comands(self, material):
self._mat_id = mat_data['id']
processed_entities = ['CONNECTOR ELASTICITY', 'CONNECTOR DAMPING']
# self._logger.info(mat_data)
mapdl_text_data = f"! material '{material}' \n"
mapdl_text_data = ""
hm_comment = self._enable_hm_comments
if hm_comment:
mapdl_text_data += "!!HMNAME MAT \n"
mapdl_text_data += f'!!{self._mat_id:>10} "{material}"\n'
else:
mapdl_text_data += f"! material '{material}' \n"
if "Parameters" in mat_data:
self._logger.warning(f"Parameter on Material {material} are not processed.")
for prop in mat_data:
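Both `_MaterialProcessor` and `_JointMaterialProcessor` now choose between two material-header styles based on the new `hm_comments` flag. A small standalone sketch of that formatting decision (hypothetical helper name; the comment strings mirror the diff):

```python
def material_header(mat_id: int, material: str, hm_comments: bool) -> str:
    """Return the comment block written before a material's MAPDL commands."""
    if hm_comments:
        # HyperMesh-readable naming comments, as added in this PR.
        return f'!!HMNAME MAT \n!!{mat_id:>10} "{material}"\n'
    # Plain MAPDL comment, matching the previous behavior.
    return f"! material '{material}' \n"


# material_header(3, "Steel", True)  -> '!!HMNAME MAT \n!!         3 "Steel"\n'
# material_header(3, "Steel", False) -> "! material 'Steel' \n"
```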
@@ -1474,7 +1497,7 @@ def _process_elasticity(self, property_dict, material, mat_id):
else:
if 'RIGID' in comp_data['Parameters']:
ff = comps_linear_mapping[comp_data['Parameters']['COMPONENT']]
elasticity_data += f"TBDATA, {ff}, 1e8\n"
elasticity_data += f"TBDATA, {ff}, 1e6\n"
continue
if 'NONLINEAR' in comp_data['Parameters']:
relative_disp = comp_data['Data']["Displacement"]
@@ -1642,12 +1665,12 @@ def _process_elasticity(self, property_dict, material, mat_id):
cds = comp_data['Data']['Stiffness'][0]
elasticity_data += f"TBDATA, {clms}, {cds}\n"
else:
elasticity_data += f"TBDATA, 1, 1e8\n"
elasticity_data += f"TBDATA, 7, 1e8\n"
elasticity_data += f"TBDATA, 12, 1e8\n"
elasticity_data += f"TBDATA, 16, 1e8\n"
elasticity_data += f"TBDATA, 19, 1e8\n"
elasticity_data += f"TBDATA, 21, 1e8\n"
elasticity_data += f"TBDATA, 1, 1e6\n"
elasticity_data += f"TBDATA, 7, 1e6\n"
elasticity_data += f"TBDATA, 12, 1e6\n"
elasticity_data += f"TBDATA, 16, 1e6\n"
elasticity_data += f"TBDATA, 19, 1e6\n"
elasticity_data += f"TBDATA, 21, 1e6\n"
return elasticity_data

def _process_damping(self, property_dict, material, mat_id):
@@ -2668,8 +2691,8 @@ def get_global_damping_commands(self, analysis, msup=False):
damping_commands += f"DMPRAT, {structural/2}\n"
elif analysis == "STEADY STATE DYNAMICS":
damping_commands += f"DMPSTR, {structural}\n"
# elif analysis == "FREQUENCY":
# damping_commands += f"DMPSTR, {structural}\n"
elif analysis == "FREQUENCY":
damping_commands += f"DMPSTR, {structural}\n"
else:
self._logger.warning(
'Global damping under STEP is not processed. Please check the results'
@@ -3345,12 +3368,12 @@ def get_output_analysis_data(self, output_data):
output_analysis_commands += "OUTRES, ERASE\n"
output_analysis_commands += "OUTRES, ALL, NONE\n"
if time_points:
output_analysis_commands += f"OUTRES, EANGL, %{time_points}%\n"
output_analysis_commands += f"! OUTRES, EANGL, %{time_points}%\n"
else:
# output_analysis_commands += "OUTRES, ALL, NONE\n"
# TODO Removed this line to avoid complications of
# multiple tabular output controls with NINTERVAL
output_analysis_commands += "OUTRES, EANGL, NONE\n"
output_analysis_commands += "! OUTRES, EANGL, NONE\n"
pass
output_analysis_commands += "\n"

@@ -3852,6 +3875,7 @@ def create_modal_vectors(self):
vector_commands += f'ACEL, 0, 0, 0 \n'
vector_commands += f'\n'
if data_line['node_set'].isnumeric():
cmname = data_line['node_set']
vector_commands += (
f"F, " f"{data_line['node_set']}, " f"{dof_map[dof]}, 1\n"
)
@@ -3861,10 +3885,10 @@
)
vector_commands += f"F, " f"{cmname}, " f"{dof_map[dof]}, 1\n"
count_load_vectors += 1
self._modal_load_vectors[count_load_vectors] = {
'SET': cmname,
"COMP": dof_map[dof],
}
self._modal_load_vectors[count_load_vectors] = {
'SET': cmname,
"COMP": dof_map[dof],
}
if "BaseMotion" in step_data:
base_motions_data = step_data['BaseMotion']
for base_motion_data in base_motions_data:
@@ -4098,7 +4122,7 @@ def _process_step(self, step_data):
mapdl_step_commands += 'DMPOPT, ESAV, NO \n'
mapdl_step_commands += 'DMPOPT, EMAT, NO \n'
mapdl_step_commands += 'DMPOPT, FULL, NO \n'
mapdl_step_commands += 'DMPOPT, MODE, NO \n'
mapdl_step_commands += 'DMPOPT, MODE, YES \n'
mapdl_step_commands += 'DMPOPT, MLV, NO \n'
mapdl_step_commands += '\n'
if self._step_counter == 1:
@@ -4175,16 +4199,23 @@ class _AxialTempCorrection:
'_connector_sections',
'_connector_behavior',
'_element_wise_csys',
'_enable_hm_comments',
'_model',
'_logger',
)

def __init__(
self, model: prime.Model, connector_sections, connector_behavior, element_wise_csys=False
self,
model: prime.Model,
connector_sections,
connector_behavior,
element_wise_csys=False,
hm_comments=False,
):
self._connector_sections = connector_sections
self._connector_behavior = connector_behavior
self._element_wise_csys = element_wise_csys
self._enable_hm_comments = hm_comments
self._model = model
self._logger = model.python_logger

@@ -4229,7 +4260,9 @@ def _modify_section_data(self, behavior_data):
secdata_string = ''
if "CONNECTOR CONSTITUTIVE REFERENCE" in behavior_data:
secdata_string += self._modify_section_type(behavior_data)
joint_a_processor = _JointMaterialProcessor(self._model, self._connector_behavior)
joint_a_processor = _JointMaterialProcessor(
self._model, self._connector_behavior, self._enable_hm_comments
)
ref_lens = joint_a_processor._precess_ref_length(
behavior_data["CONNECTOR CONSTITUTIVE REFERENCE"]
)
@@ -4319,15 +4352,20 @@ def generate_mapdl_commands(
return all_mat_cmds, analysis_settings
if "Materials" in json_simulation_data and json_simulation_data["Materials"] is not None:
mp = _MaterialProcessor(
model, json_simulation_data["Materials"], json_simulation_data["Zones"]
model,
json_simulation_data["Materials"],
json_simulation_data["Zones"],
params.write_separate_blocks,
)
mat_cmds = mp.get_all_material_commands()
all_mat_cmds = mat_cmds
if (
"ConnectorBehavior" in json_simulation_data
and json_simulation_data["ConnectorBehavior"] is not None
):
jmp = _JointMaterialProcessor(model, json_simulation_data["ConnectorBehavior"])
jmp = _JointMaterialProcessor(
model, json_simulation_data["ConnectorBehavior"], params.write_separate_blocks
)
joint_all_mat_cmds = jmp.get_all_material_commands()
all_mat_cmds += joint_all_mat_cmds
general_contact_cmds = ''
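`generate_mapdl_commands` now forwards `params.write_separate_blocks` into both material processors as their `hm_comments` flag, so HyperMesh naming comments are emitted exactly when separate blocks are written. A hedged end-to-end usage sketch follows; the `launch_prime` and `export_mapdl_cdb` calls and the file name are assumptions about the public API, not part of this diff.

```python
import ansys.meshing.prime as prime

# Sketch only: assumes a locally installed Ansys Prime Server and that
# FileIO.export_mapdl_cdb(file_name, params) exists with this signature.
client = prime.launch_prime()
model = client.model
file_io = prime.FileIO(model=model)

params = prime.ExportMapdlCdbParams()
params = file_io.initialize_cdb_export_params(params, 25, 1)
# For 25r1 and newer this enables write_separate_blocks with the COMPACT
# block format, which in turn switches material headers to !!HMNAME comments.
file_io.export_mapdl_cdb("example.cdb", params=params)  # hypothetical file name
```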
3 changes: 3 additions & 0 deletions src/ansys/meshing/prime/internals/error_handling.py
@@ -333,6 +333,9 @@
ErrorCode.AUTOQUADMESHER_INVALIDMINMAXSIZES: "Minimum size is more than maximum size.",
ErrorCode.AUTOQUADMESHER_NEGATIVEINPUTPARAMETER: "Input parameters contain one or more negative values.",
ErrorCode.FACEZONELETSHAVECELLSCONNECTED: "Face zonelets have cells connected.",
ErrorCode.IMPORTABAQUSFAILEDWITHUNKNOWNERROR: "Failed to import Abaqus file. Unknown error.",
ErrorCode.IMPORTABAQUSFAILEDWITHPARSINGFAILURE: "Failed to import Abaqus file. Failed to parse file.",
ErrorCode.IMPORTABAQUSFAILEDDURINGMESHCREATION: "Failed to import Abaqus file. Mesh creation failed after parsing.",
ErrorCode.ZEROELEMENTSREADFROMCDBFILE: "No mesh elements found. Check the input CDB file.",
ErrorCode.ZERONODESREADFROMCDBFILE: "No nodes found. Check the input CDB file.",
ErrorCode.ZEROELEMENTSFORCDBEXPORT: "No mesh elements found for CDB export. Check if the model is meshed, or set write_by_zones in ExportMapdlCdbParams to false if zones are not defined.",