From bf51582ba771a2453ac6e165c157aa17f4cdd564 Mon Sep 17 00:00:00 2001 From: SteveDoyle2 Date: Thu, 4 Jan 2024 12:10:48 -0800 Subject: [PATCH] nastran_gui3 - adding SPC/SPCADD support --- .../dev/bdf_vectorized3/alt_actor_builder.py | 151 ++++++--- pyNastran/dev/bdf_vectorized3/bdf.py | 13 +- .../dev/bdf_vectorized3/cards/constraints.py | 87 +++++- pyNastran/op2/result_objects/table_object.py | 292 ++++++++++++++++++ 4 files changed, 490 insertions(+), 53 deletions(-) diff --git a/pyNastran/dev/bdf_vectorized3/alt_actor_builder.py b/pyNastran/dev/bdf_vectorized3/alt_actor_builder.py index 8e7ef75dc..38fd38e62 100644 --- a/pyNastran/dev/bdf_vectorized3/alt_actor_builder.py +++ b/pyNastran/dev/bdf_vectorized3/alt_actor_builder.py @@ -1,3 +1,4 @@ +"""Creates a series of Nastran-themed vtkActors""" from __future__ import annotations from itertools import count from typing import TYPE_CHECKING @@ -24,12 +25,14 @@ #from pyNastran.gui.gui_objects.gui_result import GuiResult# , NormalResult #from pyNastran.gui.gui_objects.displacements import ForceTableResults, ElementalTableResults +from pyNastran.dev.bdf_vectorized3.cards.constraints import spc_cards_to_nid_dof from pyNastran.dev.bdf_vectorized3.cards.elements.shell_coords import ( get_shell_element_coordinate_system, get_shell_material_coordinate_system) if TYPE_CHECKING: # pragma: no cover - from pyNastran.dev.op2_vectorized3.bdf import BDF - from pyNastran.dev.op2_vectorized3.bdf_interface.bdf_attributes import ( + from pyNastran.dev.bdf_vectorized3.bdf import BDF + from pyNastran.dev.bdf_vectorized3.bdf_interface.bdf_attributes import ( + #SPC, SPC1, AECOMP, AECOMPL, SET1, RBE2, RBE3 #, GRID ) from pyNastran.gui.main_window import MainWindow @@ -40,11 +43,31 @@ def create_alt_spcs(gui: MainWindow, model: BDF, grid_id: np.ndarray, xyz_cid0: np.ndarray) -> None: + """ + Creates SPC/SPC1 constraints: + - Single DOF Set + - All SPCs id=1; dof=123456 + - Multiple DOF Sets + - All SPCs id=2 + - SPCs id=2; dof=123 + - SPCs 
id=2; dof=456 + + Creates SPCADD constraints: + - Single DOF Set + - All SPCADD id=1; dof=123456 + - Multiple DOF Sets + - All SPCADD id=2 + - SPCADD id=2; dof=123 + - SPCADD id=2; dof=456 + + TODO: no support for enforced + """ + spcadd = model.spcadd cards_all = (model.spc, model.spc1) cards = [card for card in cards_all if len(card)] ncards = sum([len(card) for card in cards]) - # TODO: add support for SPCADD - if ncards == 0: + + if ncards == 0: # and len(spcadd) == 0: return spc_ids_list = [] @@ -54,33 +77,23 @@ def create_alt_spcs(gui: MainWindow, uspc_id = np.unique(spc_ids) for spc_id in uspc_id: - comp_list = [] - nids_list = [] - for card in cards: - if spc_id not in card.spc_id: - continue - spc = card.slice_card_by_id(spc_id) - comp_list.append(spc.components) - nids_list.append(spc.node_id) - comp = np.hstack(comp_list) - nids = np.hstack(nids_list) - + is_failed, nids, comp = spc_cards_to_nid_dof(spc_id, cards) + if is_failed: + continue unids_all = np.unique(nids) ucomp_all = np.unique(comp) + inid = np.searchsorted(grid_id, unids_all) + xyz_cid0s = xyz_cid0[inid, :] if len(ucomp_all) == 1: # single dof; don't need "ALL SPCs" cause this # case all has the same DOFs - ucomp = unids_all[0] + ucomp = ucomp_all[0] name = f'All SPCs id={spc_id:g}; dof={ucomp}' - inid = np.searchsorted(grid_id, unids_all) - xyz_cid0s = xyz_cid0[inid, :] _build_dots(gui, name, xyz_cid0s) else: # all nodes name = f'All SPCs id={spc_id:g}' - inid = np.searchsorted(grid_id, unids_all) - xyz_cid0s = xyz_cid0[inid, :] _build_dots(gui, name, xyz_cid0s) # SPCs by DOF @@ -92,15 +105,49 @@ def create_alt_spcs(gui: MainWindow, xyz_cid0s = xyz_cid0[inid, :] _build_dots(gui, name, xyz_cid0s) - x = 1 + if len(spcadd): + reduced_spc_dict = spcadd.get_reduced_spcs() + spcadd_node_component_dict = spcadd.get_reduced_node_component(reduced_spc_dict) + #get_reduced_spcs + #spcs_dict = model.spcadd.get_spcs_by_spc_id() + for spcadd_id, (nids, comp) in spcadd_node_component_dict.items(): + 
- individual CONM2 actors
coordinate systems """ @@ -320,7 +378,7 @@ def _create_alt_axes(self: NastranIO, xyz_cid0: np.ndarray, elem, card_name: str) -> None: """ - creates orientation vectors for: + Creates orientation vectors for: - CBAR - CBEAM - CBUSH @@ -402,7 +460,7 @@ def _add_nastran_bar_vectors_to_grid(gui: MainWindow, name: str, lines: np.ndarray, eids: np.ndarray) -> None: - """creates the bar orientation vector lines + """Creates the bar orientation vector lines Parameters ---------- @@ -444,6 +502,13 @@ def create_alt_rbe2_grids(gui: MainWindow, model: BDF, grid_id: np.ndarray, xyz_cid0: np.ndarray): + """ + Creates global & individual actors for: + - RBE2 independents + - RBE2 dependents + - RBE2 lines + + """ elem = model.rbe2 if elem.n == 0: return @@ -758,7 +823,7 @@ def create_aesurf(gui: MainWindow, def create_monpnt1(gui: MainWindow, model: BDF, grid_id: np.ndarray, - xyz_cid0: np.ndarray): + xyz_cid0: np.ndarray) -> None: """ Creates MONPNT1 actors - points @@ -890,7 +955,7 @@ def _create_monpnt_aecomp_set1(gui: MainWindow, all_nids: np.ndarray, xyz_cid0: np.ndarray, name: str, label: str, - comp: str, xyz_global: np.ndarray): + comp: str, xyz_global: np.ndarray) -> None: """ Creates MONPNT1 actors - points @@ -923,11 +988,12 @@ def _create_monpnt_aecomp_aelist(gui: MainWindow, aero_element_ids: np.ndarray, aero_elements: np.ndarray, name: str, label: str, - comp: str, xyz_global: np.ndarray): + comp: str, xyz_global: np.ndarray) -> None: """ Creates MONPNT1 actors - aelist panels - xyz summation + """ aecomp = model.aecomp @@ -971,7 +1037,9 @@ def _create_monpnt_aecomp_aelist(gui: MainWindow, def _build_dot(gui: MainWindow, name: str, xyzi: np.ndarray, - point_size: int=3, color=RED_FLOAT, is_visible: bool=False): + point_size: int=3, color=RED_FLOAT, + is_visible: bool=False) -> None: + """creates a vtkActor with 1 point""" j = 0 gui.create_alternate_vtk_grid( name, color=color, point_size=point_size, opacity=1.0, @@ -994,7 +1062,9 @@ def _build_dot(gui: MainWindow, 
name: str, xyzi: np.ndarray, def _build_dots(gui: MainWindow, name: str, xyzs: np.ndarray, - point_size: int=3, color=RED_FLOAT, is_visible: bool=False) -> None: + point_size: int=3, color=RED_FLOAT, + is_visible: bool=False) -> None: + """creates a vtkActor with N points""" assert len(xyzs.shape) == 2, xyzs.shape gui.create_alternate_vtk_grid( name, color=color, point_size=point_size, opacity=1.0, @@ -1020,6 +1090,7 @@ def _build_lines(gui: MainWindow, name: str, line_width: int=3, color=RED_FLOAT, representation: str = 'wire', is_visible: bool=True) -> None: + """Creates a vtkActor with N lines""" assert len(xyzs.shape) == 2, xyzs.shape assert len(nodes_index.shape) == 2, nodes_index.shape nelement = nodes_index.shape[0] @@ -1045,8 +1116,9 @@ def _build_quads(gui: MainWindow, name: str, nodes_index: np.ndarray, line_width: int=3, color=RED_FLOAT, opacity: float=1.0, - is_visible: bool=True, - representation: str='wire+surf') -> None: + representation: str='wire+surf', + is_visible: bool=True) -> None: + """Creates a vtkActor with N quads""" assert len(xyzs.shape) == 2, xyzs.shape assert len(nodes_index.shape) == 2, nodes_index.shape assert nodes_index.shape[1] == 4, nodes_index.shape @@ -1072,10 +1144,15 @@ def _build_vtk_data_from_dnode(alt_grid: vtkUnstructuredGrid, xyz: np.ndarray, nnodes: np.ndarray, nodes_index: np.ndarray, - nelement: int, cell_typei: int, dnode: int) -> None: + nelement: int, cell_type: int, dnode: int) -> None: + """ + Creates a vtkUnstructuredGrid with a given cell_type for nelements. + This helps to avoid the complexity of vtk requiring the number of + nodes listed for each element and makes vectorization easy. 
+ """ points = numpy_to_vtk_points(xyz) - cell_type = np.ones(nelement, dtype='int64') * cell_typei + cell_type_ = np.ones(nelement, dtype='int64') * cell_type n_nodes = np.hstack([nnodes, nodes_index]).ravel() # (nnodes+1) = 4+1 = 5 @@ -1089,7 +1166,7 @@ def _build_vtk_data_from_dnode(alt_grid: vtkUnstructuredGrid, nelement_total = nelement build_vtk_geometry( nelement_total, alt_grid, - n_nodes, cell_type, cell_offset) + n_nodes, cell_type_, cell_offset) alt_grid.SetPoints(points) def build_vtk_geometry(nelement: int, diff --git a/pyNastran/dev/bdf_vectorized3/bdf.py b/pyNastran/dev/bdf_vectorized3/bdf.py index 0687cb0a2..c4d51e07c 100644 --- a/pyNastran/dev/bdf_vectorized3/bdf.py +++ b/pyNastran/dev/bdf_vectorized3/bdf.py @@ -2280,6 +2280,7 @@ def add_card(cls, card, comment=''): # tables 'TABLES1' : (TABLES1, add_methods._add_table_object), 'TABLEST' : (TABLEST, add_methods._add_table_object), + 'TABLEHT' : (RuntimeCrash, None), #'TABLEHT' : (TABLEHT, add_methods._add_table_object), 'TABLEH1' : (TABLEH1, add_methods._add_table_object), @@ -2310,10 +2311,14 @@ def add_card(cls, card, comment=''): 'MBOLTUS': (RuntimeCrash, None), 'RADMT': (RuntimeCrash, None), + 'MATS3' : (RuntimeCrash, None), + 'MATS8' : (RuntimeCrash, None), #'RADMTX' : (RADMTX, add_methods._add_radmtx_object), # TestOP2.test_bdf_op2_thermal_02 #'SESUP' : (SESUP, add_methods._add_sesup_object), # pseudo-constraint + 'SEUSET' : (RuntimeCrash, None), + 'SEUSET1' : (RuntimeCrash, None), #'SEUSET' : (SEUSET, add_methods._add_seuset_object), #'SEUSET1' : (SEUSET1, add_methods._add_seuset_object), @@ -3462,7 +3467,9 @@ def get_bdf_stats(self, return_type: str='string') -> Union[str, list[str]]: def get_displacement_index_xyz_cp_cd(self, fdtype: str='float64', idtype: str='int32', - sort_ids: bool=True) -> Any: + sort_ids: bool=True) -> tuple[dict[int, np.ndarray], + dict[int, np.ndarray], + np.ndarray, np.ndarray]: # pramga: no cover """ Get index and transformation matricies for nodes with their 
np.ndarray]]: # pragma: no cover """ Gets the xyzs as an array in an arbitrary coordinate system @@ -3630,7 +3637,7 @@ def transform_xyzcp_to_xyz_cid(self, xyz_cp: np.ndarray, icp_transform: dict[int, np.ndarray], cid: int=0, in_place: bool=False, - atol: float=1e-6) -> np.ndarray: + atol: float=1e-6) -> np.ndarray: # pragma: no cover
def slice_card_by_index(self, i: np.ndarray, **kwargs) -> SPC: - spc = SPC(self.model) - self.__apply_slice__(spc, i) - return spc + #def slice_card_by_index(self, i: np.ndarray, **kwargs) -> SPC: + #spc = SPC(self.model) + #self.__apply_slice__(spc, i) + #return spc def __apply_slice__(self, spc: SPC, i: np.ndarray) -> None: spc.n = len(i) @@ -308,10 +305,10 @@ def slice_card_by_id(self, spc_id: int, **kwargs) -> SPC1: self.__apply_slice__(spc, i) return spc - def slice_card_by_index(self, i: np.ndarray) -> SPC1: - spc = SPC1(self.model) - self.__apply_slice__(spc, i) - return spc + #def slice_card_by_index(self, i: np.ndarray) -> SPC1: + #spc = SPC1(self.model) + #self.__apply_slice__(spc, i) + #return spc def __apply_slice__(self, spc: SPC1, i: np.ndarray) -> None: spc.n = len(i) @@ -792,10 +789,34 @@ def get_reduced_spcs(self, log.error(msg) if stop_on_failure: raise RuntimeError(msg) - reduced_spcsi.append(spcs_found) + reduced_spcsi.extend(spcs_found) reduced_spcs[sid] = reduced_spcsi return reduced_spcs + def get_reduced_node_component(self, + reduced_spc_dict: dict[int, list[SPCs]], + ) -> dict[int, tuple[np.ndarray, np.ndarray]]: + """Gets the node/component dict for an SPCADD""" + compressed_spc_dict = {} + for spcadd_id, cards2 in sorted(reduced_spc_dict.items()): + all_spc_ids_list = [] + for card in cards2: + #all_spc_ids_list.append(card.components) + all_spc_ids_list.append(card.spc_id) + uspc_ids = np.unique(np.hstack(all_spc_ids_list)) + + nids_all_list = [] + comp_all_list = [] + for spc_idi in uspc_ids: + is_failed, nidsi, compsi = spc_cards_to_nid_dof(spc_idi, cards2) + if is_failed: + continue + nids_all_list.append(nidsi) + comp_all_list.append(compsi) + nids = np.hstack(nids_all_list) + comp = np.hstack(comp_all_list) + compressed_spc_dict[spcadd_id] = (nids, comp) + return compressed_spc_dict class MPCADD(ADD): _id_name = 'mpc_id' @@ -1083,5 +1104,45 @@ class BNDGRID(CommonSet): #self.add_set_card(card, comment='') +def 
spc_cards_to_nid_dof(spc_id: int, + cards: list[Union[SPC, SPC1]], + ) -> tuple[bool, np.ndarray, np.ndarray]: + """helper for making SPCs/SPCADD node/component""" + comp_list = [] + nids_list = [] + is_failed = True + for card in cards: + if spc_id not in card.spc_id: + continue + spc = card.slice_card_by_id(spc_id) + + if len(spc.components) == len(spc.node_id): + component = spc.components + comp_list.append(component) + nids_list.append(spc.node_id) + else: + assert spc.type == 'SPC1', spc + for i, nnode, (inode0, inode1) in zip(count(), spc.nnodes, spc.inode): + componenti = spc.components[i] + component = np.ones(nnode, dtype='int32') * componenti + node_id = spc.node_id[inode0:inode1] + comp_list.append(component) + nids_list.append(node_id) + + comp = np.hstack(comp_list) + nids = np.hstack(nids_list) + assert len(comp) == len(nids) + del comp, nids + + #x = 1 + if len(comp_list) == 0 and len(nids_list) == 0: + return is_failed, nids_list, comp_list + + comp = np.hstack(comp_list) + nids = np.hstack(nids_list) + assert len(comp) == len(nids) + is_failed = False + return is_failed, nids, comp + SPCs = Union[SPC, SPC1] diff --git a/pyNastran/op2/result_objects/table_object.py b/pyNastran/op2/result_objects/table_object.py index d47a1f63d..7f749fdd5 100644 --- a/pyNastran/op2/result_objects/table_object.py +++ b/pyNastran/op2/result_objects/table_object.py @@ -1216,6 +1216,298 @@ def write_csv(self, csv_file: TextIO, f'{t1i}, {t2i}, {t3i}, {r1i}, {r2i}, {r3i}, {cd}, {gridtypei}\n') return + def write_frd(self, frd_file: TextIO, + is_exponent_format: bool=False, + is_mag_phase: bool=False, is_sort1: bool=True, + write_header: bool=True): + """ + https://web.mit.edu/calculix_v2.7/CalculiX/cgx_2.7/doc/cgx/node174.html + + Nodal Results Block + Purpose: Stores values on node positions + + 1. 
Record: + Format:(1X,' 100','C',6A1,E12.5,I12,20A1,I2,I5,10A1,I2) + Values: KEY,CODE,SETNAME,VALUE,NUMNOD,TEXT,ICTYPE,NUMSTP,ANALYS, + FORMAT + Where: KEY = 100 + CODE = C + SETNAME= Name (not used) + VALUE = Could be frequency, time or any numerical value + NUMNOD = Number of nodes in this nodal results block + TEXT = Any text + ICTYPE = Analysis type + 0 static + 1 time step + 2 frequency + 3 load step + 4 user named + NUMSTP = Step number + ANALYS = Type of analysis (description) + FORMAT = Format indicator + 0 short format + 1 long format + 2 binary format + + #-------------------------------------------------------------------------- + 2. Record: + Format:(1X,I2,2X,8A1,2I5) + Values: KEY, NAME, NCOMPS, IRTYPE + Where: KEY = -4 + NAME = Dataset name to be used in the menu + NCOMPS = Number of entities + IRTYPE = 1 Nodal data, material independent + 2 Nodal data, material dependant + 3 Element data at nodes (not used) + + #-------------------------------------------------------------------------- + 3. Type of Record: + Format:(1X,I2,2X,8A1,5I5,8A1) + Values: KEY, NAME, MENU, ICTYPE, ICIND1, ICIND2, IEXIST, ICNAME + Where: KEY = -5 + NAME = Entity name to be used in the menu for this comp. + MENU = 1 + ICTYPE = Type of entity + 1 scalar + 2 vector with 3 components + 4 matrix + 12 vector with 3 amplitudes and 3 phase-angles in + degree + ICIND1 = sub-component index or row number + ICIND2 = column number for ICTYPE=4 + IEXIST = 0 data are provided + 1 data are to be calculated by predefined + functions (not used) + 2 as 0 but earmarked + ICNAME = Name of the predefined calculation (not used) + ALL calculate the total displacement if ICTYPE=2 + This record must be repeated for each entity. + + 4. Type of Record: (not used) + This record will be necessary in combination with the request for + predefined calculations. This type of record is not allowed in + combination with binary coding of data. 
Following records (ascii, FORMAT=0 | 1):
= data + + + Last Record (only FORMAT=0 | 1 (ascii), omitted for FORMAT=2): + Format:(1X,'-3') + Values: KEY + Displacement Table + ------------------ + Flag, SubcaseID, iTime, NID, dx, dy, dz, rx, ry, rz, cd, PointType + 1, 1, 0, 101, 0.014159, 0.03448, 0.019135, 0.00637, 0.008042, 0.00762, 0, 1 + uses cd=-1 for unknown cd + + """ + name = str(self.__class__.__name__) + name_map = { + 'RealDisplacementArray': 'Displacement', + 'RealSPCForcesArray': 'SPC_Force', + 'RealMPCForcesArray': 'MPC_Force', + } + name = name_map[name] + #if write_header: + #csv_file.write('%s\n' % name) + #headers = ['Flag', 'Subcase', 'iTime', 'Node', ] + self.headers + ['cd', 'PointType'] + #csv_file.write('# ' + ','.join(headers) + '\n') + + node = self.node_gridtype[:, 0] + #gridtype = self.node_gridtype[:, 1] + + #unused_times = self._times + #isubcase = self.isubcase + + # sort1 as sort1 + assert is_sort1 is True, is_sort1 + #nid_len = '%d' % len(str(node.max())) + #cd = -1 + num_step, nnode = self.data.shape[:2] + for itime in range(self.ntimes): + #dt = self._times[itime] + #1. Record: + #Format:(1X, ' 100', 'C', 6A1, E12.5, I12, 20A1,I2,I5,10A1,I2) + #Values: KEY, CODE, SETNAME, VALUE, NUMNOD, TEXT,ICTYPE,NUMSTP,ANALYS, + # FORMAT + #Where: KEY = 100 + # CODE = C + # SETNAME= Name (not used) + # VALUE = Could be frequency, time or any numerical value + # NUMNOD = Number of nodes in this nodal results block + # TEXT = Any text + # ICTYPE = Analysis type + # 0 static + # 1 time step + # 2 frequency + # 3 load step + # 4 user named + # NUMSTP = Step number + # ANALYS = Type of analysis (description) + # FORMAT = Format indicator + # 0 short format + # 1 long format + # 2 binary format + # basically table 3 + key = 100 + + text = name + text_str = f'{text:<20s}' + assert len(text_str) == 20, len(text_str) + value = self._times[0] + map_analysis = { + # displacement -> ??? 
+ 1: 1, + } + analysis = map_analysis[self.analysis_code] + + code = 'C' + ic_type = '0' + data_format = 2 + set_name = 'SET_NAME' + num_mod = nnode + frd_file.write(f'RECORD 1: {key} {code} {set_name} {value} {num_mod} {text} {ic_type} {num_step} {analysis} {data_format}\n') + + # 2. Record: + # Format:(1X, I2, 2X,8A1,2I5) + # Values: KEY, NAME, NCOMPS, IRTYPE + # Where: KEY = -4 + # NAME = Dataset name to be used in the menu + # NCOMPS = Number of entities + # IRTYPE = 1 Nodal data, material independent + # 2 Nodal data, material dependant + # 3 Element data at nodes (not used) + key = -4 + name = f'disp{itime:d}' + ncomps = 6 + ir_type = 1 + frd_file.write(f'RECORD 2: {key} {name} {ncomps} {ir_type}\n') + + # 3. Type of Record: + # Format:(1X,I2,2X,8A1,5I5,8A1) + # Values: KEY, NAME, MENU, ICTYPE, ICIND1, ICIND2, IEXIST, ICNAME + # Where: KEY = -5 + # NAME = Entity name to be used in the menu for this comp. + # MENU = 1 + # ICTYPE = Type of entity + # 1 scalar + # 2 vector with 3 components + # 4 matrix + # 12 vector with 3 amplitudes and 3 phase-angles in + # degree + # ICIND1 = sub-component index or row number + # ICIND2 = column number for ICTYPE=4 + # IEXIST = 0 data are provided + # 1 data are to be calculated by predefined + # functions (not used) + # 2 as 0 but earmarked + # ICNAME = Name of the predefined calculation (not used) + # ALL calculate the total displacement if ICTYPE=2 + # This record must be repeated for each entity + key = -5 + name = 'Translation' + menu = 1 + ic_type = 2 + iexist = 0 + ic_name = 'Translation' + icind1 = itime + icind2 = 0 + frd_file.write(f'RECORD 3: {key} {name} {menu} {ic_type} {icind1} {icind2} {iexist} {ic_name}\n') + + # 5. Type of Record: + # The following records are data-records and the format is repeated + # for each node. 
# Following records (ascii, FORMAT=2):