From c93be2aad67181ec0d7daebe02ffb7dcd7da39e1 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 15:37:04 +0100 Subject: [PATCH 01/17] add tutorials files --- .../extract_and_explore_results_data.rst | 2 ++ .../extract_and_explore_results_metadata.rst | 20 +++++++++++ .../import_data/import_result_file.rst | 2 ++ .../tutorials/import_data/index.rst | 35 +++++++++++++------ .../import_data/represent_data_on_dpf.rst | 2 ++ 5 files changed, 51 insertions(+), 10 deletions(-) create mode 100644 doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst create mode 100644 doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst create mode 100644 doc/source/user_guide/tutorials/import_data/import_result_file.rst create mode 100644 doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst new file mode 100644 index 0000000000..09c984905a --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst @@ -0,0 +1,2 @@ +.. _ref_tutorials_extract_and_explore_results_data: + diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst new file mode 100644 index 0000000000..630ba1bfbf --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -0,0 +1,20 @@ +.. _ref_tutorials_extract_and_explore_results_metadata: + +======================== +Explore results metadata +======================== + +.. |Field| replace:: :class:`Field` + +When you extract a result from a result file DPF stores it in a |Field|. +This |Field| will then contain the metadata for the result it is associated with. 
+ +The metadata includes the location, the scoping, the shape of the data stored, +number of components, and units of the data. + +This tutorial shows how to extract and explore results metadata extracted +from a result file. + +Get the results +--------------- + diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst new file mode 100644 index 0000000000..9e9c2fc2ed --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst @@ -0,0 +1,2 @@ +.. _ref_tutorials_import_result_file: + diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst index 112339d5a5..a2d51f2beb 100644 --- a/doc/source/user_guide/tutorials/import_data/index.rst +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -23,11 +23,11 @@ From user input Learn how to import data in DPF from csv file .. grid-item-card:: Represent your data in DPF - :link: ref_tutorials + :link: ref_tutorials_represent_data_on_dpf :link-type: ref :text-align: center - Learn how to represent your manual input data in a DPF data storage structure + Learn how to represent your manual input data in DPF data storage structures From result files ***************** @@ -37,29 +37,44 @@ From result files :padding: 2 :margin: 2 + .. grid-item-card:: Import a result file in DPF + :link: ref_tutorials_import_result_file + :link-type: ref + :text-align: center + + This tutorial shows how to import a result file in DPF + .. grid-item-card:: Extract and explore results metadata - :link: ref_tutorials + :link: ref_tutorials_extract_and_explore_results_metadata :link-type: ref :text-align: center - This tutorial + This tutorial shows how to extract and explore results metadata (unit, + location, the scoping, the shape of the data stored ... ) extracted + from a result file. - .. 
grid-item-card:: Extract and explore results - :link: ref_tutorials + .. grid-item-card:: Extract and explore results data + :link: ref_tutorials_extract_and_explore_results_data :link-type: ref :text-align: center - This tutorial + This tutorial shows how to extract and explore results data from a result file. - .. grid-item-card:: Narrow down data (scoping tutorial) - :link: ref_tutorials + .. grid-item-card:: Narrow down data + :link: reft_tutorials_narrow_down_data :link-type: ref :text-align: center - This tutorial + This tutorial explains how to scope (get a spatial and/or temporal subset of + the simulation data) your results. .. toctree:: :maxdepth: 2 :hidden: + represent_data_on_dpf.rst + import_result_file.rst + extract_and_explore_results_metadata.rst + extract_and_explore_results_data.rst + narrow_down_data.rst \ No newline at end of file diff --git a/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst b/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst new file mode 100644 index 0000000000..38a6299292 --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst @@ -0,0 +1,2 @@ +.. _ref_tutorials_represent_data_on_dpf: + From bb54577d4cd43217ad4830f8a39bd940339645ec Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 16:07:43 +0100 Subject: [PATCH 02/17] add narrow_down_data.rst tutorial --- .../import_data/narrow_down_data.rst | 293 ++++++++++++++++++ 1 file changed, 293 insertions(+) create mode 100644 doc/source/user_guide/tutorials/import_data/narrow_down_data.rst diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst new file mode 100644 index 0000000000..a30ce905e6 --- /dev/null +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -0,0 +1,293 @@ +.. _reft_tutorials_narrow_down_data: + +================ +Narrow down data +================ + +.. 
|Field| replace:: :class:`Field` +.. |FieldsContainer| replace:: :class:`FieldsContainer` +.. |Scoping| replace:: :class:`Scoping` +.. |MeshedRegion| replace:: :class:`MeshedRegion ` +.. |time_freq_scoping_factory| replace:: :mod:`time_freq_scoping_factory` +.. |mesh_scoping_factory| replace:: :mod:`mesh_scoping_factory` +.. |Model| replace:: :class:`Model ` +.. |displacement| replace:: :class:`result.displacement ` +.. |Model.results| replace:: :func:`Model.results ` +.. |Examples| replace:: :mod:`Examples` +.. |result op| replace:: :mod:`result` +.. |Result| replace:: :class:`Result ` +.. |rescope| replace:: :class:`rescope ` +.. |from_mesh| replace:: :class:`from_mesh ` +.. |extract_scoping| replace:: :class:`extract_scoping ` + +To begin the workflow set up, you need to establish the ``scoping``, that is +a spatial and/or temporal subset of the simulation data. This tutorial explains +how to scope your results over time and mesh domains. + +Understanding a scope +--------------------- + +The data in DPF is represented by a |Field|. Thus, narrow down your results means scoping your |Field|. +To do so in DPF you use the |Scoping| object. For more information on the DPF data storage structures +see :ref:`ref_tutorials_data_structures`. + +The |Field| scoping also defines how the data is ordered, for example: the first +ID in the scoping identifies to which entity the first data entity belongs. + +In conclusion, the essence of the scoping is to specify the set of time or mesh entities by defining a range of IDs: + +.. image:: ../../../images/drawings/scoping-eg.png + :align: center + +Create a |Scoping| +------------------ + +The |Scoping| object can be created by: + +- Instantiating the |Scoping| class (giving the location and the entities ids as arguments) +- Using a scoping factory (|time_freq_scoping_factory| methods for a temporal scoping + and |mesh_scoping_factory| for spatial scoping). + +.. 
code-block:: python + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + +Time scoping +^^^^^^^^^^^^ + +.. code-block:: python + + # 1) Using the Scoping class + # a. Define a time list that targets the times ids 14, 15, 16, 17 + my_time_list_1 = [14, 15, 16, 17] + # b. Create the time scoping object + my_time_scoping_1 = dpf.Scoping(ids=my_time_list_1, location=dpf.locations.time_freq) + + # 2) Using the time_freq_scoping_factory class + # a. Define a time list that targets the times ids 14, 15, 16, 17 + my_time_list_2 = [14, 15, 16, 17] + # b. Create the time scoping object + my_time_scoping_2 = dpf.time_freq_scoping_factory.scoping_by_sets(cumulative_sets=my_time_list_2) + +Mesh scoping +^^^^^^^^^^^^ + +.. code-block:: python + + # 1) Using the Scoping class in a nodal location + # a. Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802 + my_nodes_ids_1 = [103, 204, 334, 1802] + # b. Create the mesh scoping object + my_mesh_scoping_1 = dpf.Scoping(ids=my_nodes_ids_1, location=dpf.locations.nodal) + + # 2) Using the mesh_scoping_factory class + # a. Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802 + my_nodes_ids_2 = [103, 204, 334, 1802] + # b. Create the mesh scoping object + my_mesh_scoping_2 = dpf.mesh_scoping_factory.nodal_scoping(node_ids=my_nodes_ids_2) + +Extract a |Scoping| +------------------- + +A mesh |Scoping| can be extracted from: + +- A |MeshedRegion| with the |from_mesh| operator; +- A |FieldsContainer| with the |extract_scoping| operator; +- A |Field| with the |extract_scoping| operator. + + +Get the results file +^^^^^^^^^^^^^^^^^^^^ + +Here we will download a result file available in our |Examples| package. +For more information about how to import your result file in DPF check +the :ref:`ref_tutorials_import_result_file` tutorial. + +.. 
code-block:: python + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the result file + result_file_path_1 = examples.download_transient_result() + # Create the model + my_model_1 = dpf.Model(data_sources=result_file_path_1) + # Get the meshed region + my_meshed_region_1 = my_model_1.metadata.meshed_region + # Get a FieldsContainer + my_fc = my_model_1.results.displacement.on_all_time_freqs.eval() + # Get a Field + my_field = my_fc[0] + +Extract the |Scoping| +^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: python + + # 3) Extract the scoping from a mesh + my_mesh_scoping_3 = ops.scoping.from_mesh(mesh=my_meshed_region_1).eval() + print("Scoping from mesh", "\n", my_mesh_scoping_3, "\n") + + # 4) Extract the scoping from a FieldsContainer + extract_scop_fc_op = ops.utility.extract_scoping(field_or_fields_container=my_fc) + my_mesh_scoping_4 = extract_scop_fc_op.outputs.mesh_scoping_as_scopings_container() + print("Scoping from FieldsContainer", "\n", my_mesh_scoping_4, "\n") + + # 5) Extract the scoping from a Field + my_mesh_scoping_5 = ops.utility.extract_scoping(field_or_fields_container=my_field).eval() + print("Scoping from Field", "\n", my_mesh_scoping_5, "\n") + +.. rst-class:: sphx-glr-script-out + + .. 
jupyter-execute:: + :hide-code: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + # Define the result file + result_file_path_1 = examples.download_transient_result() + # Create the model + my_model_1 = dpf.Model(data_sources=result_file_path_1) + # Get the meshed region + my_meshed_region_1 = my_model_1.metadata.meshed_region + # Get a FieldsContainer + my_fc = my_model_1.results.displacement.on_all_time_freqs.eval() + # Get a Field + my_field = my_fc[0] + # 3) Extract the scoping from a mesh + my_mesh_scoping_3 = ops.scoping.from_mesh(mesh=my_meshed_region_1).eval() + print("Scoping from mesh", "\n", my_mesh_scoping_3, "\n") + + # 4) Extract the scoping from a FieldsContainer + extract_scop_fc_op = ops.utility.extract_scoping(field_or_fields_container=my_fc) + my_mesh_scoping_4 = extract_scop_fc_op.outputs.mesh_scoping_as_scopings_container() + print("Scoping from FieldsContainer", "\n", my_mesh_scoping_4, "\n") + + # 5) Extract the scoping from a Field + my_mesh_scoping_5 = ops.utility.extract_scoping(field_or_fields_container=my_field).eval() + print("Scoping from Field", "\n", my_mesh_scoping_5, "\n") + +Use a |Scoping| +--------------- + +The |Scoping| object can be used : + +- As an input to a |result op| operator; +- As an |Result| argument when you extract results using the |Model.results| method; +- With the |Result| object methods. + +The mesh scoping can also be changed after the result extraction or manipulation by using the +|rescope| operator with a |Field| or |FieldsContainer|. + +Get the results file +^^^^^^^^^^^^^^^^^^^^ + +Here we will download a result file available in our |Examples| package. +For more information about how to import your result file in DPF check +the :ref:`ref_tutorials_import_result_file` tutorial. + +.. 
code-block:: python + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the result file + result_file_path_1 = examples.download_transient_result() + # Create the DataSources object + my_data_sources_1 = dpf.DataSources(result_path=result_file_path_1) + # Create the model + my_model_1 = dpf.Model(data_sources=my_data_sources_1) + +Extract and scope the results +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Here we extract and scope the displacement results. + +.. code-block:: python + + # 1) Using the result.displacement operator + disp_op = ops.result.displacement(data_sources=my_data_sources_1, + time_scoping=my_time_scoping_1, + mesh_scoping=my_mesh_scoping_1).eval() + + # 2) Using the Model.results + disp_model = my_model_1.results.displacement(time_scoping=my_time_scoping_1, mesh_scoping=my_mesh_scoping_1).eval() + + # 3) Using a Result object method + disp_result_method_1 = my_model_1.results.displacement.on_time_scoping(time_scoping=my_time_scoping_1).on_mesh_scoping(mesh_scoping=my_mesh_scoping_1).eval() + disp_result_method_2 = my_model_1.results.displacement.on_first_time_freq.eval() + + print("Displacement from result.displacement operator", "\n", disp_op, "\n") + print("Displacement from Model.results ", "\n", disp_model, "\n") + print("Scoping from Result object method 1", "\n", disp_result_method_1, "\n") + print("Scoping from Result object method 1", "\n", disp_result_method_2, "\n") + +.. rst-class:: sphx-glr-script-out + + .. 
jupyter-execute:: + :hide-code: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the result file + result_file_path_1 = examples.download_transient_result() + # Create the DataSources object + my_data_sources_1 = dpf.DataSources(result_path=result_file_path_1) + # Create the model + my_model_1 = dpf.Model(data_sources=my_data_sources_1) + my_time_list_1 = [14, 15, 16, 17] + my_time_scoping_1 = dpf.Scoping(ids=my_time_list_1, location=dpf.locations.time_freq) + my_nodes_ids_1 = [103, 204, 334, 1802] + my_mesh_scoping_1 = dpf.Scoping(ids=my_nodes_ids_1, location=dpf.locations.nodal) + # 1) Using the result.displacement operator + disp_op = ops.result.displacement(data_sources=my_data_sources_1, + time_scoping=my_time_scoping_1, + mesh_scoping=my_mesh_scoping_1).eval() + + # 2) Using the Model.results + disp_model = my_model_1.results.displacement(time_scoping=my_time_scoping_1, mesh_scoping=my_mesh_scoping_1).eval() + + # 3) Using a Result object method + disp_result_method_1 = my_model_1.results.displacement.on_time_scoping(time_scoping=my_time_scoping_1).on_mesh_scoping(mesh_scoping=my_mesh_scoping_1).eval() + disp_result_method_2 = my_model_1.results.displacement.on_first_time_freq.eval() + + print("Displacement from result.displacement operator", "\n", disp_op, "\n") + print("Displacement from Model.results ", "\n", disp_model, "\n") + print("Scoping from Result object method 1", "\n", disp_result_method_1, "\n") + print("Scoping from Result object method 1", "\n", disp_result_method_2, "\n") + +Extract and rescope the results +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Here we rescope the displacement results. + +.. 
code-block:: python + + # 1) Extract the results for the entire mesh + disp_all_mesh = my_model_1.results.displacement.eval() + + # 2) Rescope the displacement results + disp_rescope = ops.scoping.rescope(fields=disp_rescope, mesh_scoping=my_mesh_scoping_1).eval() + + print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") + print("Displacement rescoped ", "\n", disp_rescope, "\n") + +.. rst-class:: sphx-glr-script-out + + .. jupyter-execute:: + :hide-code: + + disp_all_mesh = my_model_1.results.displacement.eval() + disp_rescope = ops.scoping.rescope(fields=disp_rescope, mesh_scoping=my_mesh_scoping_1).eval() + print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") + print("Displacement rescoped ", "\n", disp_rescope, "\n") \ No newline at end of file From daa28d55a025cbe439f54f63488e00890009afa2 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 16:24:07 +0100 Subject: [PATCH 03/17] updates narrow_down_data.rst tutorial --- .../user_guide/tutorials/import_data/narrow_down_data.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst index a30ce905e6..afb0c7a603 100644 --- a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -277,7 +277,7 @@ Here we rescope the displacement results. disp_all_mesh = my_model_1.results.displacement.eval() # 2) Rescope the displacement results - disp_rescope = ops.scoping.rescope(fields=disp_rescope, mesh_scoping=my_mesh_scoping_1).eval() + disp_rescope = ops.scoping.rescope(fields=disp_all_mesh, mesh_scoping=my_mesh_scoping_1).eval() print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") print("Displacement rescoped ", "\n", disp_rescope, "\n") @@ -288,6 +288,6 @@ Here we rescope the displacement results. 
:hide-code: disp_all_mesh = my_model_1.results.displacement.eval() - disp_rescope = ops.scoping.rescope(fields=disp_rescope, mesh_scoping=my_mesh_scoping_1).eval() + disp_rescope = ops.scoping.rescope(fields=disp_all_mesh, mesh_scoping=my_mesh_scoping_1).eval() print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") print("Displacement rescoped ", "\n", disp_rescope, "\n") \ No newline at end of file From 3bbadc93f206ac7f54a993a6cb71a6f1cf0dd451 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 16:41:23 +0100 Subject: [PATCH 04/17] add extract_and_explore_results_metadata.rst tutorial --- .../extract_and_explore_results_metadata.rst | 212 +++++++++++++++++- 1 file changed, 206 insertions(+), 6 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index 630ba1bfbf..6939e08b68 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -5,16 +5,216 @@ Explore results metadata ======================== .. |Field| replace:: :class:`Field` +.. |Examples| replace:: :mod:`Examples` +.. |ResultInfo| replace:: :class:`ResultInfo` + +You can explore the general results metadata before extracting them by using +the |ResultInfo| object. This metadata includes: + +- Analysis type; +- Physics type; +- Number of results; +- Unit system; +- Solver version, date and time; +- Job name; When you extract a result from a result file DPF stores it in a |Field|. This |Field| will then contain the metadata for the result it is associated with. +This metadata includes: + +- Location; +- Scoping; +- Shape of the data stored; +- Number of components; +- Units of the data. + +This tutorial shows how to extract and explore results metadata from a result file. 
+ +Get the result file +------------------- + +Here we will download a result file available in our |Examples| package. +For more information about how to import your result file in DPF check +the :ref:`ref_tutorials_import_result_file` tutorial. + +Here we get the displacement results. + +.. code-block:: python + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the result file + result_file_path_1 = examples.download_transient_result() + # Create the model + my_model_1 = dpf.Model(data_sources=result_file_path_1) + +Explore the general results metadata +------------------------------------ + +Get the |ResultInfo| object from the model and then explore it using this class's methods. + +.. code-block:: python + + # Define the ResultInfo object + my_result_info_1 = my_model_1.metadata.result_info + + # Get the analysis type + my_analysis_type = my_result_info_1.analysis_type + print("Analysis type: ",my_analysis_type, "\n") + + # Get the physics type + my_physics_type = my_result_info_1.physics_type + print("Physics type: ",my_physics_type, "\n") + + # Get the number of available results + number_of_results = my_result_info_1.n_results + print("Number of available results: ",number_of_results, "\n") + + # Get the unit system + my_unit_system = my_result_info_1.unit_system + print("Unit system: ",my_unit_system, "\n") + + # Get the solver version, date and time + my_solver_version = my_result_info_1.solver_version + print("Solver version: ",my_solver_version, "\n") + + my_solver_date = my_result_info_1.solver_date + print("Solver date: ", my_solver_date, "\n") + + my_solver_time = my_result_info_1.solver_time + print("Solver time: ",my_solver_time, "\n") + + # Get the job name + my_job_name = my_result_info_1.job_name + print("Job name: ",my_job_name, "\n") + +.. 
rst-class:: sphx-glr-script-out + + .. jupyter-execute:: + :hide-code: + + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + result_file_path_1 = examples.download_transient_result() + my_model_1 = dpf.Model(data_sources=result_file_path_1) + my_result_info_1 = my_model_1.metadata.result_info + my_analysis_type = my_result_info_1.analysis_type + print("Analysis type: ",my_analysis_type, "\n") + my_physics_type = my_result_info_1.physics_type + print("Physics type: ",my_physics_type, "\n") + number_of_results = my_result_info_1.n_results + print("Number of available results: ",number_of_results, "\n") + my_unit_system = my_result_info_1.unit_system + print("Unit system: ",my_unit_system, "\n") + my_solver_version = my_result_info_1.solver_version + print("Solver version: ",my_solver_version, "\n") + my_solver_date = my_result_info_1.solver_date + print("Solver date: ", my_solver_date, "\n") + my_solver_time = my_result_info_1.solver_time + print("Solver time: ",my_solver_time, "\n") + my_job_name = my_result_info_1.job_name + print("Job name: ",my_job_name, "\n") + +Explore a given result metadata +------------------------------- + +Here we will explore the metadata of the displacement results. + +Start by extracting the displacement results: + +.. code-block:: python + + # Extract the displacement results + disp_results = my_model_1.results.displacement.eval() + + # Get the displacement field + my_disp_field = disp_results[0] + +Explore the displacement results metadata: + +.. 
code-block:: python + + # Location of the displacement data + my_location = my_disp_field.location + print("Location: ", my_location,'\n') + + # Displacement field scoping + my_scoping = my_disp_field.scoping # type and quantity of entities + print("Scoping: ", '\n',my_scoping, '\n') + + my_scoping_ids = my_disp_field.scoping.ids # Available entities ids + print("Scoping ids: ", '\n', my_scoping_ids, '\n') + + # Elementary data count + # Number of entities (how many data vectors we have) + my_elementary_data_count = my_disp_field.elementary_data_count + print("Elementary data count: ", my_elementary_data_count, '\n') + + # Components count + # Vectors dimension, here we have a displacement so we expect to have 3 components (X, Y and Z) + my_components_count = my_disp_field.component_count + print("Components count: ", my_components_count, '\n') + + # Size + # Length of the data entire vector (equal to the number of elementary data times the number of components) + my_field_size = my_disp_field.size + print("Size: ", my_field_size, '\n') + + # Fields shape + # Gives a tuple with the elementary data count and the components count + my_shape = my_disp_field.shape + print("Shape: ", my_shape, '\n') + + # Units + my_unit = my_disp_field.unit + print("Unit: ", my_unit, '\n') + +.. rst-class:: sphx-glr-script-out + + .. 
jupyter-execute:: + :hide-code: + + # Extract the displacement results + disp_results = my_model_1.results.displacement.eval() + + # Get the displacement field + my_disp_field = disp_results[0] + + # Location of the displacement data + my_location = my_disp_field.location + print("Location: ", my_location,'\n') + + # Displacement field scoping + my_scoping = my_disp_field.scoping # type and quantity of entities + print("Scoping: ", '\n',my_scoping, '\n') + + my_scoping_ids = my_disp_field.scoping.ids # Available entities ids + print("Scoping ids: ", '\n', my_scoping_ids, '\n') + + # Elementary data count + # Number of entities (how many data vectors we have) + my_elementary_data_count = my_disp_field.elementary_data_count + print("Elementary data count: ", my_elementary_data_count, '\n') -The metadata includes the location, the scoping, the shape of the data stored, -number of components, and units of the data. + # Components count + # Vectors dimension, here we have a displacement so we expect to have 3 components (X, Y and Z) + my_components_count = my_disp_field.component_count + print("Components count: ", my_components_count, '\n') -This tutorial shows how to extract and explore results metadata extracted -from a result file. 
+ # Size + # Length of the data entire vector (equal to the number of elementary data times the number of components) + my_field_size = my_disp_field.size + print("Size: ", my_field_size, '\n') -Get the results ---------------- + # Fields shape + # Gives a tuple with the elementary data count and the components count + my_shape = my_disp_field.shape + print("Shape: ", my_shape, '\n') + # Units + my_unit = my_disp_field.unit + print("Unit: ", my_unit, '\n') \ No newline at end of file From 121ed2a2dd9f13ccd321f2d414d681d00d85eb12 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 16:42:12 +0100 Subject: [PATCH 05/17] updates extract_and_explore_results_metadata.rst tutorial --- .../import_data/extract_and_explore_results_metadata.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index 6939e08b68..fa56ec06c3 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -147,7 +147,7 @@ Explore the displacement results metadata: print("Scoping: ", '\n',my_scoping, '\n') my_scoping_ids = my_disp_field.scoping.ids # Available entities ids - print("Scoping ids: ", '\n', my_scoping_ids, '\n') + print("Scoping ids: ", my_scoping_ids, '\n') # Elementary data count # Number of entities (how many data vectors we have) @@ -193,7 +193,7 @@ Explore the displacement results metadata: print("Scoping: ", '\n',my_scoping, '\n') my_scoping_ids = my_disp_field.scoping.ids # Available entities ids - print("Scoping ids: ", '\n', my_scoping_ids, '\n') + print("Scoping ids: ", my_scoping_ids, '\n') # Elementary data count # Number of entities (how many data vectors we have) From 1fd00919359532230ad43ec7b62ab70519da0dad Mon Sep 17 00:00:00 2001 From: 
luisaFelixSalles Date: Thu, 21 Nov 2024 16:45:55 +0100 Subject: [PATCH 06/17] updates extract_and_explore_results_metadata.rst tutorial --- .../import_data/extract_and_explore_results_metadata.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index fa56ec06c3..b7615a7400 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -19,7 +19,7 @@ the |ResultInfo| object. This metadata includes: - Job name; When you extract a result from a result file DPF stores it in a |Field|. -This |Field| will then contain the metadata for the result it is associated with. +This |Field| will then contain the metadata for the result associated with it. This metadata includes: - Location; From 99468c19800b42fa880484b8e4f4657b3e5e3dc0 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 16:49:21 +0100 Subject: [PATCH 07/17] updates narrow_down_data.rst tutorial --- .../tutorials/import_data/narrow_down_data.rst | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst index afb0c7a603..8b0aa10960 100644 --- a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -28,8 +28,18 @@ Understanding a scope --------------------- The data in DPF is represented by a |Field|. Thus, narrow down your results means scoping your |Field|. -To do so in DPF you use the |Scoping| object. For more information on the DPF data storage structures -see :ref:`ref_tutorials_data_structures`. +To do so in DPF you use the |Scoping| object. 
+ +.. note:: + + Scoping is important because when DPF-Core returns the |Field| object, what Python actually has + is a client-side representation of the |Field|, not the entirety of the |Field| itself. This means + that all the data of the field is stored within the DPF service. This is important + because when building your workflows, the most efficient way of interacting with result data + is to minimize the exchange of data between Python and DPF, either by using operators + or by accessing exclusively the data that is needed. + +For more information on the DPF data storage structures see :ref:`ref_tutorials_data_structures`. The |Field| scoping also defines how the data is ordered, for example: the first ID in the scoping identifies to which entity the first data entity belongs. From 0c40796cb4f0a2ea28e6affce632a983fbf9653f Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Thu, 21 Nov 2024 17:10:44 +0100 Subject: [PATCH 08/17] updates extract_and_explore_results_metadata.rst tutorial --- .../import_data/extract_and_explore_results_metadata.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index b7615a7400..6ffccd9909 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -33,7 +33,7 @@ This tutorial shows how to extract and explore results metadata from a result fi Get the result file ------------------- -Here we will download a result file available in our |Examples| package. +Here we will download a result file available in our |Examples| package. For more information about how to import your result file in DPF check the :ref:`ref_tutorials_import_result_file` tutorial. 
From 6e4f2708fedf332941fc37d1ecebca8534e365f7 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 10:53:20 +0100 Subject: [PATCH 09/17] utilise que juptyter sphinx --- .../import_data/narrow_down_data.rst | 93 ++----------------- 1 file changed, 8 insertions(+), 85 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst index 8b0aa10960..73f4f0ce1a 100644 --- a/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst +++ b/doc/source/user_guide/tutorials/import_data/narrow_down_data.rst @@ -58,7 +58,7 @@ The |Scoping| object can be created by: - Using a scoping factory (|time_freq_scoping_factory| methods for a temporal scoping and |mesh_scoping_factory| for spatial scoping). -.. code-block:: python +.. jupyter-execute:: # Import the ``ansys.dpf.core`` module from ansys.dpf import core as dpf @@ -66,7 +66,7 @@ The |Scoping| object can be created by: Time scoping ^^^^^^^^^^^^ -.. code-block:: python +.. jupyter-execute:: # 1) Using the Scoping class # a. Define a time list that targets the times ids 14, 15, 16, 17 @@ -83,7 +83,7 @@ Time scoping Mesh scoping ^^^^^^^^^^^^ -.. code-block:: python +.. jupyter-execute:: # 1) Using the Scoping class in a nodal location # a. Define a nodes list that targets the nodes with the ids 103, 204, 334, 1802 @@ -114,7 +114,7 @@ Here we will download a result file available in our |Examples| package. For more information about how to import your result file in DPF check the :ref:`ref_tutorials_import_result_file` tutorial. -.. code-block:: python +.. jupyter-execute:: # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage from ansys.dpf import core as dpf @@ -135,7 +135,7 @@ the :ref:`ref_tutorials_import_result_file` tutorial. Extract the |Scoping| ^^^^^^^^^^^^^^^^^^^^^ -.. code-block:: python +.. 
jupyter-execute:: # 3) Extract the scoping from a mesh my_mesh_scoping_3 = ops.scoping.from_mesh(mesh=my_meshed_region_1).eval() @@ -150,38 +150,6 @@ Extract the |Scoping| my_mesh_scoping_5 = ops.utility.extract_scoping(field_or_fields_container=my_field).eval() print("Scoping from Field", "\n", my_mesh_scoping_5, "\n") -.. rst-class:: sphx-glr-script-out - - .. jupyter-execute:: - :hide-code: - - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops - # Define the result file - result_file_path_1 = examples.download_transient_result() - # Create the model - my_model_1 = dpf.Model(data_sources=result_file_path_1) - # Get the meshed region - my_meshed_region_1 = my_model_1.metadata.meshed_region - # Get a FieldsContainer - my_fc = my_model_1.results.displacement.on_all_time_freqs.eval() - # Get a Field - my_field = my_fc[0] - # 3) Extract the scoping from a mesh - my_mesh_scoping_3 = ops.scoping.from_mesh(mesh=my_meshed_region_1).eval() - print("Scoping from mesh", "\n", my_mesh_scoping_3, "\n") - - # 4) Extract the scoping from a FieldsContainer - extract_scop_fc_op = ops.utility.extract_scoping(field_or_fields_container=my_fc) - my_mesh_scoping_4 = extract_scop_fc_op.outputs.mesh_scoping_as_scopings_container() - print("Scoping from FieldsContainer", "\n", my_mesh_scoping_4, "\n") - - # 5) Extract the scoping from a Field - my_mesh_scoping_5 = ops.utility.extract_scoping(field_or_fields_container=my_field).eval() - print("Scoping from Field", "\n", my_mesh_scoping_5, "\n") - Use a |Scoping| --------------- @@ -201,7 +169,7 @@ Here we will download a result file available in our |Examples| package. For more information about how to import your result file in DPF check the :ref:`ref_tutorials_import_result_file` tutorial. -.. code-block:: python +.. 
jupyter-execute:: # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage from ansys.dpf import core as dpf @@ -220,7 +188,7 @@ Extract and scope the results Here we extract and scope the displacement results. -.. code-block:: python +.. jupyter-execute:: # 1) Using the result.displacement operator disp_op = ops.result.displacement(data_sources=my_data_sources_1, @@ -239,49 +207,12 @@ Here we extract and scope the displacement results. print("Scoping from Result object method 1", "\n", disp_result_method_1, "\n") print("Scoping from Result object method 1", "\n", disp_result_method_2, "\n") -.. rst-class:: sphx-glr-script-out - - .. jupyter-execute:: - :hide-code: - - # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops - - # Define the result file - result_file_path_1 = examples.download_transient_result() - # Create the DataSources object - my_data_sources_1 = dpf.DataSources(result_path=result_file_path_1) - # Create the model - my_model_1 = dpf.Model(data_sources=my_data_sources_1) - my_time_list_1 = [14, 15, 16, 17] - my_time_scoping_1 = dpf.Scoping(ids=my_time_list_1, location=dpf.locations.time_freq) - my_nodes_ids_1 = [103, 204, 334, 1802] - my_mesh_scoping_1 = dpf.Scoping(ids=my_nodes_ids_1, location=dpf.locations.nodal) - # 1) Using the result.displacement operator - disp_op = ops.result.displacement(data_sources=my_data_sources_1, - time_scoping=my_time_scoping_1, - mesh_scoping=my_mesh_scoping_1).eval() - - # 2) Using the Model.results - disp_model = my_model_1.results.displacement(time_scoping=my_time_scoping_1, mesh_scoping=my_mesh_scoping_1).eval() - - # 3) Using a Result object method - disp_result_method_1 = my_model_1.results.displacement.on_time_scoping(time_scoping=my_time_scoping_1).on_mesh_scoping(mesh_scoping=my_mesh_scoping_1).eval() - 
disp_result_method_2 = my_model_1.results.displacement.on_first_time_freq.eval() - - print("Displacement from result.displacement operator", "\n", disp_op, "\n") - print("Displacement from Model.results ", "\n", disp_model, "\n") - print("Scoping from Result object method 1", "\n", disp_result_method_1, "\n") - print("Scoping from Result object method 1", "\n", disp_result_method_2, "\n") - Extract and rescope the results ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Here we rescope the displacement results. -.. code-block:: python +.. jupyter-execute:: # 1) Extract the results for the entire mesh disp_all_mesh = my_model_1.results.displacement.eval() @@ -292,12 +223,4 @@ Here we rescope the displacement results. print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") print("Displacement rescoped ", "\n", disp_rescope, "\n") -.. rst-class:: sphx-glr-script-out - .. jupyter-execute:: - :hide-code: - - disp_all_mesh = my_model_1.results.displacement.eval() - disp_rescope = ops.scoping.rescope(fields=disp_all_mesh, mesh_scoping=my_mesh_scoping_1).eval() - print("Displacement on all the mesh", "\n", disp_all_mesh, "\n") - print("Displacement rescoped ", "\n", disp_rescope, "\n") \ No newline at end of file From 1fd80a7120deca06aa70af41fe979096e688f0fe Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 10:55:16 +0100 Subject: [PATCH 10/17] use only jupyter sphinx: extract_and_explore_results_metadata.rst --- .../extract_and_explore_results_metadata.rst | 82 +------------------ 1 file changed, 4 insertions(+), 78 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index 6ffccd9909..0ef98867e9 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -39,7 +39,7 @@ the 
:ref:`ref_tutorials_import_result_file` tutorial. Here we get the displacement results. -.. code-block:: python +.. jupyter-execute:: # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage from ansys.dpf import core as dpf @@ -56,7 +56,7 @@ Explore the general results metadata Get the |ResultInfo| object from the model and then explore it using this class methods. -.. code-block:: python +.. jupyter-execute:: # Define the ResultInfo object my_result_info_1 = my_model_1.metadata.result_info @@ -91,34 +91,6 @@ Get the |ResultInfo| object from the model and then explore it using this class my_job_name = my_result_info_1.job_name print("Job name: ",my_job_name, "\n") -.. rst-class:: sphx-glr-script-out - - .. jupyter-execute:: - :hide-code: - - from ansys.dpf import core as dpf - from ansys.dpf.core import examples - from ansys.dpf.core import operators as ops - result_file_path_1 = examples.download_transient_result() - my_model_1 = dpf.Model(data_sources=result_file_path_1) - my_result_info_1 = my_model_1.metadata.result_info - my_analysis_type = my_result_info_1.analysis_type - print("Analysis type: ",my_analysis_type, "\n") - my_physics_type = my_result_info_1.physics_type - print("Physics type: ",my_physics_type, "\n") - number_of_results = my_result_info_1.n_results - print("Number of available results: ",number_of_results, "\n") - my_unit_system = my_result_info_1.unit_system - print("Unit system: ",my_unit_system, "\n") - my_solver_version = my_result_info_1.solver_version - print("Solver version: ",my_solver_version, "\n") - my_solver_date = my_result_info_1.solver_date - print("Solver date: ", my_solver_date, "\n") - my_solver_time = my_result_info_1.solver_time - print("Solver time: ",my_solver_time, "\n") - my_job_name = my_result_info_1.job_name - print("Job name: ",my_job_name, "\n") - Explore a given result metadata ------------------------------- @@ -126,7 +98,7 @@ Here we will explore the metadata of the 
displacement results. Start by extracting the displacement results: -.. code-block:: python +.. jupyter-execute:: # Extract the displacement results disp_results = my_model_1.results.displacement.eval() @@ -136,7 +108,7 @@ Start by extracting the displacement results: Explore the displacement results metadata: -.. code-block:: python +.. jupyter-execute:: # Location of the displacement data my_location = my_disp_field.location @@ -172,49 +144,3 @@ Explore the displacement results metadata: # Units my_unit = my_disp_field.unit print("Unit: ", my_unit, '\n') - -.. rst-class:: sphx-glr-script-out - - .. jupyter-execute:: - :hide-code: - - # Extract the displacement results - disp_results = my_model_1.results.displacement.eval() - - # Get the displacement field - my_disp_field = disp_results[0] - - # Location of the displacement data - my_location = my_disp_field.location - print("Location: ", my_location,'\n') - - # Displacement field scoping - my_scoping = my_disp_field.scoping # type and quantity of entities - print("Scoping: ", '\n',my_scoping, '\n') - - my_scoping_ids = my_disp_field.scoping.ids # Available entities ids - print("Scoping ids: ", my_scoping_ids, '\n') - - # Elementary data count - # Number of entities (how many data vectors we have) - my_elementary_data_count = my_disp_field.elementary_data_count - print("Elementary data count: ", my_elementary_data_count, '\n') - - # Components count - # Vectors dimension, here we have a displacement so we expect to have 3 components (X, Y and Z) - my_components_count = my_disp_field.component_count - print("Components count: ", my_components_count, '\n') - - # Size - # Length of the data entire vector (equal to the number of elementary data times the number of components) - my_field_size = my_disp_field.size - print("Size: ", my_field_size, '\n') - - # Fields shape - # Gives a tuple with the elementary data count and the components count - my_shape = my_disp_field.shape - print("Shape: ", my_shape, '\n') - - # 
Units - my_unit = my_disp_field.unit - print("Unit: ", my_unit, '\n') \ No newline at end of file From d198f4589c8a28e8222e82c01b81a583f1b421d7 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 11:27:49 +0100 Subject: [PATCH 11/17] add import_result_file.rst tutorial --- .../import_data/import_result_file.rst | 320 ++++++++++++++++++ 1 file changed, 320 insertions(+) diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst index 9e9c2fc2ed..e9c68e607d 100644 --- a/doc/source/user_guide/tutorials/import_data/import_result_file.rst +++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst @@ -1,2 +1,322 @@ .. _ref_tutorials_import_result_file: +========================= +Import result file in DPF +========================= + +.. |Model| replace:: :class:`Model ` +.. |DataSources| replace:: :class:`DataSources ` +.. |Examples| replace:: :mod:`Examples` +.. |set_result_file_path| replace:: :func:`set_result_file_path() ` +.. |add_file_path| replace:: :func:`add_file_path() ` + +This tutorial shows how to import a result file in DPF. + +You have two approaches to import a result file in DPF: + +- Using the |DataSources| object +- Using the |Model| object + +.. note:: + + The |Model| extracts a large amount of information by default (results, mesh and analysis data). + If using this helper takes a long time for processing the code, mind using a |DataSources| object + and instantiating operators directly with it. Check the ":ref:`get_mesh_mesh_provider`" for more + information on how to get a mesh from a result file. + +Define the result file path +--------------------------- + +Both approaches need a file path to be defined. Here we will download result files available in +our |Examples| package. + +.. tab-set:: + + .. tab-item:: MAPDL + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the .rst result file + result_file_path_11 = examples.find_static_rst() + + # Define the modal superposition harmonic analysis (.mode, .rfrq and .rst) result files + result_file_path_12 = examples.download_msup_files_to_dict() + + print("1:", "\n",result_file_path_11, "\n") + print("2:", "\n",result_file_path_12, "\n") + + .. tab-item:: LSDYNA + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the .d3plot result file + result_file_path_21 = examples.download_d3plot_beam() + + # Define the .binout result file + result_file_path_22 = examples.download_binout_matsum() + + print("1:", "\n",result_file_path_21, "\n") + print("2:", "\n",result_file_path_22, "\n") + + .. tab-item:: Fluent + + .. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the project .flprj result file + result_file_path_31 = examples.download_fluent_axial_comp()["flprj"] + + # Define the CFF .cas.h5/.dat.h5 result files + result_file_path_32 = examples.download_fluent_axial_comp() + + print("1:", "\n",result_file_path_31, "\n") + print("2:", "\n",result_file_path_32, "\n") + + .. tab-item:: CFX + + .. 
jupyter-execute:: + + # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage + from ansys.dpf import core as dpf + from ansys.dpf.core import examples + from ansys.dpf.core import operators as ops + + # Define the project .res result file + result_file_path_41 = examples.download_cfx_mixing_elbow() + + # Define the CFF .cas.cff/.dat.cff result files + result_file_path_42 = examples.download_cfx_heating_coil() + + print("1:", "\n",result_file_path_41, "\n") + print("2:", "\n",result_file_path_42, "\n") + +Use a |DataSources| +------------------- + +The |DataSources| object manages paths to their files. Use this object to declare data +inputs for DPF operators and define their locations. + +.. tab-set:: + + .. tab-item:: MAPDL + + **a) `.rst` result file** + + You create the |DataSources| object by defining the the path to the main result file. + + .. jupyter-execute:: + + # Create the DataSources object + my_data_sources_11 = dpf.DataSources(result_path=result_file_path_11) + + **b) `.mode`, `.rfrq` and `.rst` result files** + + In the modal superposition, modal coefficients are multiplied by mode shapes (of a previous modal analysis) + to analyse a structure under given boundary conditions in a range of frequencies. Doing this expansion “on demand” + in DPF instead of in the solver reduces the size of the result files. + + The expansion is recursive in DPF: first the modal response is read. Then, “upstream” mode shapes are found in + the data sources, where they are read and expanded. + + To create a recursive workflow you have to add the upstream data to the main |DataSources| object. Upstream refers + to a source that provides data to a particular process. + + .. 
jupyter-execute:: + + # Create the DataSources object + my_data_sources_12 = dpf.DataSources() + # Define the main result data + my_data_sources_12.set_result_file_path(filepath=result_file_path_12["rfrq"], key='rfrq') + + # Create the upstream DataSources object with the main upstream data + up_stream_ds_12 = dpf.DataSources(result_path=result_file_path_12["mode"]) + # Add the additional upstream data to the upstream DataSources object + up_stream_ds_12.add_file_path(filepath=result_file_path_12["rst"]) + + # Add the upstream DataSources to the main DataSources object + my_data_sources_12.add_upstream(upstream_data_sources=up_stream_ds_12) + + .. tab-item:: LSDYNA + + **a) `.d3plot` result file** + + This LS-DYNA d3plot file contains several individual results, each at different times. + The d3plot file does not contain information related to Units. In this case, as the + simulation was run through Mechanical, a ``file.actunits`` file is produced. If this + file is supplemented in the |DataSources|, the units will be correctly fetched for all + results in the file as well as for the mesh. + + .. jupyter-execute:: + + # Create the DataSources object + my_data_sources_21 = dpf.DataSources() + my_data_sources_21.set_result_file_path(filepath=result_file_path_21[0], key="d3plot") + my_data_sources_21.add_file_path(filepath=result_file_path_21[3], key="actunits") + + **b) `.binout` result file** + + The extension key ``.binout`` is not specified in the result file. Thus, we use the + |set_result_file_path| method to correctly implement the result file to the |DataSources| by giving + explicitly the extension key as an argument. + + .. jupyter-execute:: + + # Create the DataSources object + my_data_sources_22 = dpf.DataSources() + # Define the the path to the main result + my_data_sources_22.set_result_file_path(filepath=result_file_path_22, key="binout") + + .. 
tab-item:: Fluent + + **a) `.flprj` result file** + + You create the |DataSources| object by defining the the path to the main result file. + + .. jupyter-execute:: + + # Create the DataSources object + my_data_sources_31 = dpf.DataSources(result_path=result_file_path_31) + + **b) `.cas.h5`, `.dat.h5` result files** + + Here we have a main and an additional result files. Thus, we use the + |set_result_file_path| method, to correctly implement the result file to the |DataSources| by giving + explicitly the first extension key as an argument, and the |add_file_path| method, to add the additional + result file. + + .. jupyter-execute:: + + # Create the DataSources object + my_data_sources_32 = dpf.DataSources() + # Define the path to the main result file + my_data_sources_32.set_result_file_path(filepath=result_file_path_32['cas'][0], key="cas") + # Add the additional result file to the DataSources + my_data_sources_32.add_file_path(filepath=result_file_path_32['dat'][0], key="dat") + + .. tab-item:: CFX + + **a) `.res` result file** + + You create the |DataSources| object by defining the the path to the main result file. + + .. jupyter-execute:: + + # Create the DataSources object + my_data_sources_41 = dpf.DataSources(result_path=result_file_path_41) + + **b) `.cas.cff`, `.dat.cff` result files** + + Here we have a main and an additional result files. Thus, we use the + |set_result_file_path| method, to correctly implement the result file to the |DataSources| by giving + explicitly the first extension key as an argument, and the |add_file_path| method, to add the additional + result file. + + .. 
jupyter-execute:: + + # Create the DataSources object + my_data_sources_42 = dpf.DataSources() + # Define the path to the main result file + my_data_sources_42.set_result_file_path(filepath=result_file_path_42["cas"], key="cas") + # Add the additional result file to the DataSources + my_data_sources_42.add_file_path(filepath=result_file_path_42["dat"], key="dat") + +Use a |Model| +------------- + +The |Model| is a helper designed to give shortcuts to the user to access the analysis results +metadata, by opening a DataSources or a Streams, and to instanciate results provider for it. + +To create a |Model| you can provide the result file path, in the case you are working with a single result +file with an explicit extension key, or a |DataSources| as an argument. + +.. tab-set:: + + .. tab-item:: MAPDL + + **a) `.rst` result file** + + .. jupyter-execute:: + + # Create the model with the result file path + my_model_11 = dpf.Model(data_sources=result_file_path_11) + + # Create the model with the DataSources + my_model_12 = dpf.Model(data_sources=my_data_sources_11) + + **b) `.mode`, `.rfrq` and `.rst` result files** + + .. jupyter-execute:: + + # Create the model with the DataSources + my_model_13 = dpf.Model(data_sources=my_data_sources_12) + + .. tab-item:: LSDYNA + + **a) `.d3plot` result file** + + .. jupyter-execute:: + + # Create the model with the DataSources + my_model_21 = dpf.Model(data_sources=my_data_sources_21) + + **b) `.binout` result file** + + .. jupyter-execute:: + + # Create the model with the DataSources + my_model_22 = dpf.Model(data_sources=my_data_sources_22) + + .. tab-item:: Fluent + + **a) `.flprj` result file** + + .. jupyter-execute:: + + # Create the model with the result file path + my_model_31 = dpf.Model(data_sources=result_file_path_31) + + # Create the model with the DataSources + my_model_32 = dpf.Model(data_sources=my_data_sources_31) + + **b) `.cas.h5`, `.dat.h5` result files** + + .. 
jupyter-execute:: + + # Create the model with the DataSources + my_model_33 = dpf.Model(data_sources=my_data_sources_32) + + .. tab-item:: CFX + + .. jupyter-execute:: + + **a) `.res` result file** + + .. jupyter-execute:: + + # Create the model with the result file path + my_model_41 = dpf.Model(data_sources=result_file_path_41) + + # Create the model with the DataSources + my_model_42 = dpf.Model(data_sources=my_data_sources_41) + + **b) `.cas.cff`, `.dat.cff` result files** + + .. jupyter-execute:: + + # Create the model with the DataSources + my_model_43 = dpf.Model(data_sources=my_data_sources_42) + From 098d3e17e38c6c6f90b1b469a06a9ea379d0a216 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 11:28:44 +0100 Subject: [PATCH 12/17] updates --- .../extract_and_explore_results_data.rst | 21 +++++++++++++++++++ .../tutorials/import_data/index.rst | 7 +++---- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst index 09c984905a..4cc427331a 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst @@ -1,2 +1,23 @@ .. _ref_tutorials_extract_and_explore_results_data: +==================== +Explore results data +==================== + +.. |Field| replace:: :class:`Field` +.. |Examples| replace:: :mod:`Examples` + +This tutorial shows how to extract and explore results data from a result file. + +When you extract a result from a result file DPF stores it in a |Field|. +This |Field| will then contain the data of the result associated with it. + +When DPF-Core returns the |Field| object, what Python actually has is a client-side +representation of the |Field|, not the entirety of the |Field| itself. 
This means +that all the data of the field is stored within the DPF service. This is important +because when building your workflows, the most efficient way of interacting with result data +is to minimize the exchange of data between Python and DPF, either by using operators +or by accessing exclusively the data that is needed. + + + diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst index a2d51f2beb..b366137b27 100644 --- a/doc/source/user_guide/tutorials/import_data/index.rst +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -42,16 +42,15 @@ From result files :link-type: ref :text-align: center - This tutorial shows how to import a result file in DPF + This tutorial shows how to import a result file in DPF. .. grid-item-card:: Extract and explore results metadata :link: ref_tutorials_extract_and_explore_results_metadata :link-type: ref :text-align: center - This tutorial shows how to extract and explore results metadata (unit, - location, the scoping, the shape of the data stored ... ) extracted - from a result file. + This tutorial shows how to extract and explore results metadata (analysis type, + physics type, unit system ... ) from a result file. .. 
grid-item-card:: Extract and explore results data :link: ref_tutorials_extract_and_explore_results_data From 137952fa1ca1255c40d508951813300a712a1d62 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 15:27:34 +0100 Subject: [PATCH 13/17] add extract_and_explore_results_data.rst tutorial --- .../extract_and_explore_results_data.rst | 128 +++++++++++++++++- 1 file changed, 124 insertions(+), 4 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst index 4cc427331a..f936bf03a1 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_data.rst @@ -1,16 +1,20 @@ .. _ref_tutorials_extract_and_explore_results_data: -==================== -Explore results data -==================== +================================ +Extract and explore results data +================================ .. |Field| replace:: :class:`Field` .. |Examples| replace:: :mod:`Examples` +.. |Result| replace:: :class:`Result ` +.. |FieldsContainer| replace:: :class:`FieldsContainer` +.. |get_entity_data| replace:: :func:`get_entity_data()` +.. |get_entity_data_by_id| replace:: :func:`get_entity_data_by_id()` This tutorial shows how to extract and explore results data from a result file. When you extract a result from a result file DPF stores it in a |Field|. -This |Field| will then contain the data of the result associated with it. +This |Field| will contain the data of the result associated with it. When DPF-Core returns the |Field| object, what Python actually has is a client-side representation of the |Field|, not the entirety of the |Field| itself. 
This means
+that all the data of the field is stored within the DPF service. This is important
+because when building your workflows, the most efficient way of interacting with result data
+is to minimize the exchange of data between Python and DPF, either by using operators
+or by accessing exclusively the data that is needed.
+
+The |Field| data is ordered with respect to its scoping ids (check the :ref:`ref_tutorials_narrow_down_data`
+tutorial for more information on scoping manipulations).
+
+Get the results
+---------------
+
+Here we will download a result file available in our |Examples| package.
+For more information about how to import your result file in DPF check
+the :ref:`ref_tutorials_import_result_file` tutorial.
+
+Here we extract the displacement results. The displacement |Result| object gives a |FieldsContainer| when evaluated.
+Thus, we will get a |Field| from this |FieldsContainer|.
+
+.. jupyter-execute::
+
+    # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage
+    from ansys.dpf import core as dpf
+    from ansys.dpf.core import examples
+    from ansys.dpf.core import operators as ops
+
+    # Define the result file
+    result_file_path_1 = examples.download_transient_result()
+
+    # Create the model
+    my_model_1 = dpf.Model(data_sources=result_file_path_1)
+
+    # Extract the displacement results for the last time step
+    disp_results = my_model_1.results.displacement.on_last_time_freq.eval()
+
+    # Get the displacement field for the last time step
+    my_disp_field = disp_results[0]
+
+    print(my_disp_field)
+
+Extract all data from a field
+-----------------------------
+
+You can extract the entire data in the |Field| as an array (numpy array) or as a list.
+
+Data as an array
+^^^^^^^^^^^^^^^^
+
+.. jupyter-execute::
+
+    # Get the displacement data as an array
+    my_data_array = my_disp_field.data
+    print("Displacement data as an array: ", '\n', my_data_array)
+
+Note that this array is a genuine, local, numpy array (overloaded by the DPFArray):
+
+.. 
jupyter-execute::
+
+    print("Array type: ", type(my_data_array))
+
+Data as a list
+^^^^^^^^^^^^^^
+
+.. jupyter-execute::
+
+    # Get the displacement data as a list
+    my_data_list = my_disp_field.data_as_list
+    print("Displacement data as a list: ", '\n', my_data_list)
+
+Extract specific data from a field
+----------------------------------
+
+If you need to access data for specific entities (node, element ...), you can extract it
+based on its index (data position on the |Field|) by using the |get_entity_data| method, or based
+on the entity's id (by using the |get_entity_data_by_id| method).
+
+Get the data by the entity index
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. jupyter-execute::
+
+    # Get the data from the third entity in the field
+    data_3_entity = my_disp_field.get_entity_data(index=3)
+    print("Data entity index=3: ", data_3_entity)
+
+Get the data by the entity id
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. jupyter-execute::
+
+    # Get the data from the entity with id=533
+    data_533_entity = my_disp_field.get_entity_data_by_id(id=533)
+    print("Data entity id=533: ", data_533_entity)
+
+Note that the element with id=533 would correspond to an index=2 within the |Field|.
+
+.. jupyter-execute::
+
+    # Get the index of the entity with id=533
+    index_533_entity = my_disp_field.scoping.index(id=533)
+    print("Index entity id=533: ",index_533_entity)
+
+Be aware that scoping IDs are not sequential. You would get the id of the element at position 533
+of the |Field| with:
+
+.. jupyter-execute::
+
+    # Get the id of the entity with index=533
+    id_533_entity = my_disp_field.scoping.id(index=533)
+    print("Id entity index=533: ",id_533_entity)
+
+
+While these methods are acceptable when requesting data for a few elements
+or nodes, they should not be used when looping over the entire array. For efficiency,
+a |Field|'s data can be recovered locally before sending a large number of requests:
+
+.. 
jupyter-execute:: + + # Create a deep copy of the field that can be accessed and modified locally. + with my_disp_field.as_local_field() as f: + for i in my_disp_field.scoping.ids[2:50]: + f.get_entity_data_by_id(i) + + print(f) \ No newline at end of file From 22de65430488a7796231b209ad1a83be6c347192 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Fri, 22 Nov 2024 15:28:17 +0100 Subject: [PATCH 14/17] updates on extract_and_explore_results_metadata.rst --- .../import_data/extract_and_explore_results_metadata.rst | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst index 0ef98867e9..bfcbea2ca4 100644 --- a/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst +++ b/doc/source/user_guide/tutorials/import_data/extract_and_explore_results_metadata.rst @@ -1,8 +1,8 @@ .. _ref_tutorials_extract_and_explore_results_metadata: -======================== -Explore results metadata -======================== +==================================== +Extract and explore results metadata +==================================== .. |Field| replace:: :class:`Field` .. |Examples| replace:: :mod:`Examples` @@ -37,8 +37,6 @@ Here we will download a result file available in our |Examples| package. For more information about how to import your result file in DPF check the :ref:`ref_tutorials_import_result_file` tutorial. -Here we get the displacement results. - .. 
jupyter-execute:: # Import the ``ansys.dpf.core`` module, including examples files and the operators subpackage From bbcd6ecd31a9e397ba410f95271da108ec3a63ec Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Mon, 25 Nov 2024 16:39:42 +0100 Subject: [PATCH 15/17] add represent_data_on_dpf.rst tutorial --- .../import_data/represent_data_on_dpf.rst | 101 ++++++++++++++++++ 1 file changed, 101 insertions(+) diff --git a/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst b/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst index 38a6299292..75637032c9 100644 --- a/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst +++ b/doc/source/user_guide/tutorials/import_data/represent_data_on_dpf.rst @@ -1,2 +1,103 @@ .. _ref_tutorials_represent_data_on_dpf: +======================== +Manual input data on DPF +======================== + +.. |Field| replace:: :class:`Field` +.. |FieldsContainer| replace:: :class:`FieldsContainer` +.. |append| replace:: :func:`append()` +.. |data| replace:: :attr:`Field.data` +.. |scoping| replace:: :attr:`Field.scoping` + +This tutorial shows how to represent your manual input data in DPF data storage structures. + +When handling data, DPF uses |FieldsContainer| and |Field| to store and return it. The |Field| is a DPF array +and a collection of |Field| objects is called a |FieldsContainer|. For more information on how the data is structured +in a |Field| and how the DPF data storage structures work, check the :ref:`ref_tutorials_data_structures` +tutorial section. + +Here we will create some 3D vector |Field| objects, where the data comes from lists. + +Defining the fields +------------------- + +To manually import data on DPF, you have to create the structure to store it. + +Here we create a |Field| from scratch by instantiating this object. When using this approach, the |Field| has +a vector nature by default. 
Check the :ref:`ref_tutorials_data_structures` tutorial section for more information +on other approaches. + +We will need four 3D vector |Field| objects: + +.. jupyter-execute:: + + # Import the ``ansys.dpf.core`` module + from ansys.dpf import core as dpf + + # Create the fields + # a. Define the number of entities + num_entities_1 = 2 + + # b. Instantiate the field + field_1 = dpf.Field(nentities=num_entities_1) + field_2 = dpf.Field(nentities=num_entities_1) + field_3 = dpf.Field(nentities=num_entities_1) + field_4 = dpf.Field(nentities=num_entities_1) + + # c. Define the scoping ids + + field_3.scoping.ids = range(num_entities_1) + field_4.scoping.ids = range(num_entities_1) + + # d. Create a FieldsContainer + fc_1 = dpf.fields_container_factory.over_time_freq_fields_container(fields=[field_1, field_2]) + + # Check the Fields and the FieldsContainer + print("Field 1: ", "\n" ,field_1, "\n") + print("Field 2: ", "\n" ,field_2, "\n") + print("Field 3: ", "\n" ,field_3, "\n") + print("Field 4: ", "\n" ,field_4, "\n") + print("FieldsContainer: ", "\n" ,fc_1, "\n") + +Add data to the fields +---------------------- + +Here we define the data and then add it to the fields. + +You can add data to a |Field| by using the |append| method, if you have not set the |scoping| property +with the scoping ids, or the |data| property, if you have set the |scoping| property +with the scoping ids. + +.. jupyter-execute:: + + # Define and add the data to the fields + # a. Using the append method + + # Define the Fields data + data_11 = [1.0, 2.0, 3.0] + data_12 = [4.0, 5.0, 6.0] + data_21 = [7.0, 3.0, 5.0] + data_22 = [8.0, 1.0, 2.0] + + # Add the data to the field + field_1.append(data=data_11, scopingid=0) + field_1.append(data=data_12, scopingid=1) + field_2.append(data=data_21, scopingid=0) + field_2.append(data=data_22, scopingid=1) + + # b. 
Using the data property + + # Define the Fields data + data_3b = [6.0, 5.0, 4.0, 3.0, 2.0, 1.0] + data_4b = [4.0, 1.0, 8.0, 5.0, 7.0, 9.0] + + # Add the data to the field + field_3.data = data_3b + field_4.data = data_4b + + # Check the Fields + print("Field 1: ", "\n", field_1, "\n") + print("Field 2: ", "\n", field_2, "\n") + print("Field 3: ", "\n" ,field_3, "\n") + print("Field 4: ", "\n" ,field_4, "\n") From f22dd3927bb042c44b2974f96567d5a94ff42270 Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Mon, 25 Nov 2024 17:10:32 +0100 Subject: [PATCH 16/17] updates on the index page --- doc/source/user_guide/tutorials/import_data/index.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst index b366137b27..4944ce6d2a 100644 --- a/doc/source/user_guide/tutorials/import_data/index.rst +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -20,9 +20,9 @@ From user input :link-type: ref :text-align: center - Learn how to import data in DPF from csv file + Learn how to import data in DPF from a csv file - .. grid-item-card:: Represent your data in DPF + .. 
grid-item-card:: Manual input data on DPF :link: ref_tutorials_represent_data_on_dpf :link-type: ref :text-align: center From 2e1c8d94f1450183022177be5e94c993074291ba Mon Sep 17 00:00:00 2001 From: luisaFelixSalles Date: Mon, 25 Nov 2024 17:21:21 +0100 Subject: [PATCH 17/17] updates --- .../tutorials/import_data/import_result_file.rst | 2 +- doc/source/user_guide/tutorials/import_data/index.rst | 7 ------- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/doc/source/user_guide/tutorials/import_data/import_result_file.rst b/doc/source/user_guide/tutorials/import_data/import_result_file.rst index e9c68e607d..8bcb8a1163 100644 --- a/doc/source/user_guide/tutorials/import_data/import_result_file.rst +++ b/doc/source/user_guide/tutorials/import_data/import_result_file.rst @@ -21,7 +21,7 @@ You have two approaches to import a result file in DPF: The |Model| extracts a large amount of information by default (results, mesh and analysis data). If using this helper takes a long time for processing the code, mind using a |DataSources| object - and instantiating operators directly with it. Check the ":ref:`get_mesh_mesh_provider`" for more + and instantiating operators directly with it. Check the :ref:`get_mesh_mesh_provider` for more information on how to get a mesh from a result file. Define the result file path diff --git a/doc/source/user_guide/tutorials/import_data/index.rst b/doc/source/user_guide/tutorials/import_data/index.rst index 4944ce6d2a..c7ead0bdde 100644 --- a/doc/source/user_guide/tutorials/import_data/index.rst +++ b/doc/source/user_guide/tutorials/import_data/index.rst @@ -15,13 +15,6 @@ From user input :padding: 2 :margin: 2 - .. grid-item-card:: Import data from csv file - :link: ref_tutorials - :link-type: ref - :text-align: center - - Learn how to import data in DPF from a csv file - .. grid-item-card:: Manual input data on DPF :link: ref_tutorials_represent_data_on_dpf :link-type: ref