diff --git a/.github/workflows/doc-build.yml b/.github/workflows/doc-build.yml new file mode 100644 index 00000000..78f1a476 --- /dev/null +++ b/.github/workflows/doc-build.yml @@ -0,0 +1,148 @@ +name: doc-build + +on: [push, pull_request, workflow_dispatch] + +permissions: + contents: write + pages: write + id-token: write + +jobs: + build-source: + runs-on: windows-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup conda environment + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: 3.9.1 + + - name: Create diff_check conda environment + run: | + conda env create -f environment.yml + + - name: Cache conda environment cache + uses: actions/cache@v2 + with: + path: C:\Miniconda\envs\diff_check + key: ${{ runner.os }}-conda-${{ hashFiles('environment.yml') }} + + - name: Cmake Configure + run: | + conda run --name diff_check --no-capture-output cmake -S . -B build -A x64 -DBUILD_PYTHON_MODULE=ON -DBUILD_TESTS=OFF -DRUN_TESTS=OFF + + - name: CMake Build + run: conda run --name diff_check --no-capture-output cmake --build build --config Release + + # upload artifacts + - name: Move dlls and pyd files to single directories + run: | + mkdir $env:GITHUB_WORKSPACE\artifacts_dlls + mkdir $env:GITHUB_WORKSPACE\artifacts_pyds + Get-ChildItem -Path $env:GITHUB_WORKSPACE\build\bin\Release -Filter *.dll -Recurse | Move-Item -Destination $env:GITHUB_WORKSPACE\artifacts_dlls + Get-ChildItem -Path $env:GITHUB_WORKSPACE\build\Release -Filter *.pyd -Recurse | Move-Item -Destination $env:GITHUB_WORKSPACE\artifacts_pyds + shell: pwsh + - name: Upload artifacts - dlls + uses: actions/upload-artifact@v2 + with: + name: __build_artifacts_dlls__ + path: ${{ github.workspace }}/artifacts_dlls/* + - name: Upload artifacts - pyds + uses: actions/upload-artifact@v2 + with: + name: __build_artifacts_pyds__ + path: ${{ github.workspace }}/artifacts_pyds/* + + + build-docs: + runs-on: windows-latest + needs: build-source + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup conda environment + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: 3.9.1 + + - name: Restore conda environment cache + uses: actions/cache@v2 + with: + path: C:\Miniconda\envs\diff_check + key: ${{ runner.os }}-conda-${{ hashFiles('environment.yml') }} + restore-keys: | + ${{ runner.os }}-conda- + + # download artifacts + - name: Download dlls for doc folder + uses: actions/download-artifact@v2 + with: + name: __build_artifacts_dlls__ + path: ${{github.workspace}}/doc + - name: Download pyds for doc folder + uses: actions/download-artifact@v2 + with: + name: __build_artifacts_pyds__ + path: ${{github.workspace}}/doc + - name: Download dlls for diffCheck py package + uses: actions/download-artifact@v2 + with: + name: __build_artifacts_dlls__ + path: ${{github.workspace}}/src/gh/diffCheck/diffCheck/dlls + - name: Download pyds for diffCheck py package + uses: actions/download-artifact@v2 + with: + name: __build_artifacts_pyds__ + path: ${{github.workspace}}/src/gh/diffCheck/diffCheck + + - name: Sphinx build + run: | + conda run --name diff_check --no-capture-output sphinx-build -b html -v doc _build + + - name: Upload documentation + uses: actions/upload-artifact@v2 + with: + name: __build_sphx_docs__ + path: ${{ github.workspace }}/_build + + + page-deployement: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + needs: 
build-docs + # Run only on pushes to the default branch + if: github.ref == 'refs/heads/main' + # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. + # However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. + concurrency: + group: "pages" + cancel-in-progress: false + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Download sphinx docs + uses: actions/download-artifact@v2 + with: + name: __build_sphx_docs__ + path: ${{github.workspace}}/_build + + - name: Setup Pages + uses: actions/configure-pages@v5 + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + path: '_build' + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 \ No newline at end of file diff --git a/.github/workflows/py-sanity-check.yml b/.github/workflows/py-sanity-check.yml new file mode 100644 index 00000000..8ed35b45 --- /dev/null +++ b/.github/workflows/py-sanity-check.yml @@ -0,0 +1,38 @@ +name: py-sanity-checks + +on: [push, pull_request] + +jobs: + build: + runs-on: windows-latest + + steps: + - uses: actions/checkout@v2 + + - name: Setup conda environment + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: 3.9.1 + - name: Cache conda environment cache + uses: actions/cache@v2 + with: + path: C:\Miniconda\envs\diff_check + key: ${{ runner.os }}-conda-${{ hashFiles('environment.yml') }} + restore-keys: | + ${{ runner.os }}-conda- + id: cache-conda + - name: Create diff_check conda environment if not cached + if: steps.cache-conda.outputs.cache-hit != 'true' + run: | + conda env create -f environment.yml + + - name: Cache pre-commit hooks + uses: actions/cache@v2 + with: + path: ~/.cache/pre-commit + key: ${{ runner.os }}-precommit-${{ hashFiles('.pre-commit-config.yaml') }} + - name: Install pre-commit + run: pip install pre-commit + - name: Run pre-commit + run: conda run --name diff_check --no-capture-output pre-commit run --all-files \ No newline at end of file diff --git a/.gitignore b/.gitignore index f52a3483..d60c7865 100644 --- a/.gitignore +++ b/.gitignore @@ -247,3 +247,9 @@ cython_debug/ # egg-info *.egg-info/ + +####################################### +## Sphinx +####################################### +# get rid of output folder +_build/ diff --git a/.gitmodules b/.gitmodules index 631697ed..ac63e882 100644 --- a/.gitmodules +++ b/.gitmodules @@ -2,9 +2,6 @@ path = deps/googletest url = https://github.com/google/googletest branch = main -[submodule "deps/eigen"] - path = deps/eigen - url = https://gitlab.com/libeigen/eigen.git [submodule "deps/open3d"] path = deps/open3d url = https://github.com/diffCheckOrg/submodule-open3d.git @@ -20,3 +17,6 @@ [submodule "deps/submodule-cilantro"] path = deps/submodule-cilantro url = https://github.com/diffCheckOrg/submodule-cilantro.git +[submodule "deps/eigen"] + path = deps/eigen + url = https://gitlab.com/libeigen/eigen.git diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..b9fa5d3a --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,44 @@ +# Install the pre-commit hooks below with +# 'pre-commit install' + +# Auto-update the version of the hooks with +# 'pre-commit autoupdate' + +# Run the hooks on all files with +# 'pre-commit run --all' + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: check-ast + exclude: ^deps/ + - id: check-merge-conflict + 
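+  # A single hook from this list can also be run on its own while debugging
+  # the configuration (a usage sketch, assuming the standard pre-commit CLI):
+  #   pre-commit run check-merge-conflict --all-files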
- id: end-of-file-fixer + # only include python files + files: \.py$ + - id: trailing-whitespace + # only include python files + files: \.py$ + - id: end-of-file-fixer + # only include python files + files: \.py$ + +- repo: https://github.com/pre-commit/mirrors-mypy + rev: 'v1.11.0' + hooks: + - id: mypy + files: (src/gh|tests|invokes\.py) + exclude: src/gh/components/ + additional_dependencies: [ + types-requests==2.31.0, + numpy==2.0.1, + pytest==8.3.1, + types-setuptools>=71.1.0.20240818 + ] + args: [--config=pyproject.toml] + +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.4.4 + hooks: + - id: ruff \ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt index 115d78e8..d40f59a5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -137,6 +137,7 @@ if (BUILD_PYTHON_MODULE) set(PYBINDMODULE_NAME diffcheck_bindings) set(PYPI_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src/gh/diffCheck/diffCheck) set(TARGET_DLL_PYPI_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src/gh/diffCheck/diffCheck/dlls) + set(SPHINX_DOC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/doc) download_submodule_project(pybind11) add_subdirectory(deps/pybind11) @@ -162,6 +163,13 @@ if (BUILD_PYTHON_MODULE) ${PYPI_DIR} ) copy_dlls(${TARGET_DLL_PYPI_DIR} ${PYBINDMODULE_NAME}) + # copy the pyd/dlls for the sphinx documentation + add_custom_command(TARGET ${PYBINDMODULE_NAME} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + $ + ${SPHINX_DOC_DIR} + ) + copy_dlls(${SPHINX_DOC_DIR} ${PYBINDMODULE_NAME}) endif() #-------------------------------------------------------------------------- @@ -169,5 +177,4 @@ endif() #-------------------------------------------------------------------------- if(BUILD_TESTS) include(tests) -endif() - +endif() \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index eecc1ed7..00000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,366 +0,0 @@ - -# Guide for contributors -Here's you can find some documentations and guidelines to contribute to the source code. - ---- -# Git - -### GitHub commit convetion -All commits need to be labeled with a tag among these: -``` -git commit -m "ADD:" <--- for adding new elements -git commit -m "FIX:" <--- for fixing (errors, typos) -git commit -m "FLASH:" <--- quick checkpoint before refactoring -git commit -m "MILESTONE:" <--- for capping moment in development -git commit -m "CAP:" <--- for for less important milestones -git commit -m "UPDATE:" <--- for moddification to the same file -git commit -m "MISC:" <--- for any other reasons to be described -git commit -m "WIP:" <--- for not finished work -git commit -m "REFACTOR:" <--- for refactored code -git commit -m "MERGE:" <--- for merging operations -``` -You can merge few tags e.g.: -``` -git commit -m "WIP-CAP: <--- for cap moment in not finished work -``` - -### Delete submodule -To delete a submodule in Win, you need to: -1. Delete the relevant section from the `.gitmodules` file. The section would look something like this: -```terminal -[submodule "submodule_name"] - path = submodule_path - url = submodule_url -``` -2. Stage the `.gitmodules` changes: -```terminal -git add .gitmodules -``` -3. (optional) Delete the relevant section from `.git/config`. The section would look something like this: -```terminal -[submodule "submodule_name"] - url = submodule_url -``` -4. Run `git rm --cached path_to_submodule` (no trailing slash). -5. Run `Remove-Item -Recurse -Force .git/modules/path_to_submodule`. -6. 
Commit the changes: -```terminal -git commit -m "Remove a submodule name" -``` - ---- -# Python -DiffCheck is distributed as a Python Grasshopperr plug-in via yak and its source code via PyPI. The plug-in is composed by a series of `.ghuser` components. - -There are 3 ways you can contribute to the Python GH plug-in: -1. By adding new components to the plug-in. -2. By fixing bugs of existing components in the plug-in. -3. By adding new functionalities to existing components in the plug-in. - -Before committing to the repository you need to have tested the components in the Grasshopper environment and be sure that this is working correctly. Also, provide a sufficient documentation in the PR (for now) please. - -Follow these steps to develop and test the Python GH plug-in: -- [GHPy: A) preparation](#ghpy-a-preparation) -- [GHPy: B) development/debug](#ghpy-b-developmentdebug) -- [GHPy: C) Release](#ghpy-c-release) -- [GHPy: D) Documentation](#ghpy-d-documentation) - -## GHPy: A) preparation -Download this repo if you haven't already. -```terminal -git clone https://github.com/diffCheckOrg/diffCheck.git -``` - -Next, if you used diffCheck before as an end-user clean all the `diffCheck folders` in the following directory (the last name will change): -```terminal -C:\Users\\.rhinocode\py39-rh8\site-envs\default-wMh5LZL3 -``` -> note that if you drop an official released diffCheck component from yak, this one will have the `#r : diffCheck==` notation at the top of the script. Get rid of all these release components before to start and be sur to erase again the previous folders (they recreated each time `#r : diffCheck` is called). - -Build the package from the py source code's directory: -```py -python setup.py sdist bdist_wheel -``` - -Lastly, install the pip pacakge from the repository in editable mode. This way, all the modifications made to the source code of the repository will be reflected in the installed package. Open a terminal and run the following command (replace the path with where you download the repository): -```terminal -C:\Users\\.rhinocode\py39-rh8\python.exe -m pip install -e "\src\gh\diffCheck" -``` - -For your info the packages is installed in `C:\Users\andre\.rhinocode\py39-rh8\Lib\site-packages`. - -That's it you are now a contributor to the diffCheck! We raccomand to not download anymore from yak package but rather use the source code in the repository. If you want the latest diffCheck, checkout and pull the main. - -## GHPy: B) development/debug - -### B.1) Code structure -For DiffCheck there are 2 main folders in the repository: -* `src/gh/diffCheck/components` here you can add new components or modify existing ones (for more info on how to create one we point you to [this documentation](https://github.com/compas-dev/compas-actions.ghpython_components)). Here we call the -* `src/gh/diffCheck/diffCheck` this is our package where the core functionalities are implemented. - -### B.2) Developing component's content -The idea is to start by developing the content of the component in the file `src/gh/diffCheck/diffCgeck_app.py`. This would be a simple script that contains the logic of the component. Once the script `diffCheck_app.py` is working correctly, you can move the code to the component file in the `src/gh/diffCheck/components` folder. This is because the component file is the one that will be componentized and distributed via yak. - -We reccomand to use `VSCode` as IDE for developing the components. 
This is because it has a good integration with the `Grasshopper` environment and it is easy to debug the components. To set up the IDE follow these steps: -1. Install the `ScriptSync` extension for `VSCode`. -2. Install the `ScriptSync` from the yak manager in Rhino. -3. Open the `diffCheckApp.py` from the `src/gh/diffCheck/components` folder you are working on in `VSCode`, and set its path to the ScriptSync ghcomponent. -4. If you modify the code in `VSCode`, the changes will be reflected in the Grasshopper component as soon as you save in `VSCode` again the `code.py`. -5. Once your code is working, prepare the code and componentize it. - -If you want to use the GHEditor it's ok but everytime you modify the pakcage or the component's code, after any modifications you need to restart the Python interpreter from the ScriptEditor (`Tools > Reload Python3 (CPython) Engine`) and recompute the solution in Grasshopper. - -### B.3) Componentize the code -Prepare your component as explained here. You can componentize it locally and test it in Grasshopper. Here's how to componentize: -```terminal -python .\invokes\ghcomponentize\ghcomponentizer.py --ghio "C:\Users\andre\.nuget\packages\grasshopper\8.2.23346.13001\lib\net48\" .\src\gh\components\ .\build\gh -``` -> Note that you need to find the path to your GHIO folder. This is the folder where the `Grasshopper.dll` is located. E.g. You can find it in the `nuget` folder in the Rhino installation directory. - -Once you are sure that the component is working correctly, you can push the changes to the repository. - -## GHPy: C) Release -The release will be made via CI from main. As a contributor you don't need to worry about this. The plug-in is componentized, pushed to yak/PyPI and the user can download the latest version from yak. - -## GHPy: D) Documentation -More to come. - - - - ---- -# C++ - -### Naming & synthax convention -Here's the naming convention for this project: -- ` `: lowerCamelCase. -- `type PublicVariable`: public member of a class -- `type m_PrivateVariable`: Hungarian notation with UpperCamelCase for private class members. -- `static type s_StaticVariable`: Hungarian notation with UpperCamelCase for static members of class. -- `APP_SPEC`: Constants with SNAKE_UPPER_CASE. -- All the other naming uses UpperCamelCase. 
- -Here's an example: -```c++ -// do not use using namespace std; we specify the namespace everytime -std::foo() - -// next line graph style -void Foo() -{ - /* content */ -} - -// structure name uses UpperCamelCase -struct AnExampleStruct -{ - // structure attribute uses UpperCamelCase - const char* Name; -}; - -// class name uses UpperCamelCase -class AnExampleClass -{ -public: - AnExampleClass(const int& init); - virtual ~AnExampleClass(); - - // member functions use UpperCamelCase - void PublicMemberFunction() - { - // local variable uses lowerCamelCase - int localVariable = 0; - } - -// A field indicator to separate the functions and attributes -public: - int PublicVariable; - -// Private member function block -private: - // member functions use UpperCamelCase - void PrivateMemberFunction(); - -// Also a field indicator to separate the functions and attributes -private: - // private variables uses Hungarian notation with UpperCamelCase - int m_PrivateVariable; // m_VariableName for normal variable - static int s_Instance; // s_VariableName for static variable -}; - -// Start headers with -#pragma once - -// Start declarations with precompiled headers -#include "aiacpch.h" -``` - -### Only smart (or unique) pointers -It's 2024, we can pass on raw pointers. We use smart pointers. -```c++ -std::unique_ptr example = std::make_unique(0); -``` -Or if you really need to use an unique pointer because you don't want to transfer the ownership of the object, use a shared pointer. -```c++ -std::shared_ptr example = std::make_shared(0); -``` - -### Debugging with GDB -We use GDB for debugging. To install GDB on windows, do the following: -1. Download the MSYS2 installer from the [MSYS2 website](https://www.msys2.org/). -2. Run the installer and follow the instructions in the [MSYS2 installation guide](https://www.msys2.org/wiki/MSYS2-installation/). -3. Open the MSYS2 terminal and update the core package database: -```bash -pacman -Syu -``` -4. Install the GDB debugger: -```bash -pacman -S mingw-w64-x86_64-gdb -``` -5. Add the GDB to the system path in PATH_ENVIRONMENT: -6. Close the terminal sessions you where using and open a new one. Now you can use GDB. -```bash -gdb "path-to-executable" -``` -> use `run` to start the program and `quit` to exit the debugger. -> use `break` to set a breakpoint and `continue` to continue the execution of the program. -> use `bt` to see the backtrace of the program when a segfault occurs. - - - -### Logging -To log use the following MACROS. All the code is contained in `log.hh` and `log.cc`. -```c++ -DIFFCHECK_INFO("test_core_info"); -DIFFCHECK_WARN("test_core_warn"); -DIFFCHECK_ERROR("test_core_error"); -DIFFCHECK_FATAL("test_core_critical"); -``` -The output is like so: -```bash -2024-03-30 12:53:29.971 ( 0.000s) [ ADF6D348] diffCheckApp.cc:24 INFO| test_core_info -2024-03-30 12:53:29.972 ( 0.000s) [ ADF6D348] diffCheckApp.cc:25 WARN| test_core_warn -2024-03-30 12:53:29.972 ( 0.000s) [ ADF6D348] diffCheckApp.cc:26 ERR| test_core_error -2024-03-30 12:53:29.972 ( 0.000s) [ ADF6D348] diffCheckApp.cc:27 FATL| test_core_critical -``` -The logging can be silenced by setting ON the option in the main `CMakeLists.txt`. -```cmake -option(SILENT_LOGGING "Do not log messages in the terminal of on." 
ON) -``` - -### I/O and basic datatypes -Here's how you can import point cloud from file: -```c++ -#include "diffCheck/geometry/DFPointCloud.hh" -#include "diffCheck/geometry/DFMesh.hh" - -// clouds -std::shared_ptr dfPointCloudPtr = std::make_shared(); -std::string pathMesh = R"(C:\Users\yourfilecloudpath.ply)"; -dfPointCloudPtr->LoadFromPLY(pathCloud); - -// mesh -std::shared_ptr dfMeshPtr = std::make_shared(); -std::string pathCloud = R"(C:\Users\yourfilemeshpath.ply)"; -dfMeshPtr->LoadFromPLY(pathMesh); -``` - -### Visualizer - -Clouds and mesh can be visualized like this: -```c++ -#include "diffCheck/visualizer/DFVisualizer.hh" - -// clouds -std::shared_ptr dfVisualizerPtr = std::make_shared(); -dfVisualizerPtr->AddPointCloud(dfPointCloudPtr); -dfVisualizerPtr->Run(); - -// mesh -std::shared_ptr dfVisualizerPtr = std::make_shared(); -dfVisualizerPtr->AddMesh(dfMeshPtr); -dfVisualizerPtr->Run(); -``` - - -### Test suite -In df we use `CTest` as a test framework managed by Cmake in the file `cmake/tests.cmake` to run c++ tests with `GoogleTest` and for python in `PyTest`. - -Tests are in the `tests` folder: -```terminal -F:\DIFFCHECK\TESTS -│ allCppTests.cc -│ -├───integration_tests <-- mainly python interfaces -│ ├───ghcomponents_tests <-- relative to gh components -│ │ .gitkeep -│ │ -│ ├───package_tests <-- relative to the pypi package -│ │ .gitkeep -│ │ -│ └───pybinds_tests <-- strictly pybinding -│ │ diffCheck.dll -│ │ diffcheck_bindings.cp39-win_amd64.pyd -│ │ Open3D.dll -│ │ test_pybind_pyver.py -│ │ test_pybind_units.py -│ -├───test_data <-- here is where we put some .ply data -│ roof_quarter.ply -│ -└───unit_tests <-- c++ backend, one for each header - DFLog.cc - DFPointCloudTest.cc -``` - -To run the tests, you can use the following commands: -```terminal -cmake -S . -B build -A x64 -DBUILD_PYTHON_MODULE=ON -DBUILD_TESTS=ON -DRUN_TESTS=ON -cmake --build build --config Release -``` - -## Write C++ tests -To write a test, you need to create a new file in the `tests/unit_tests` folder. Next add your file in the executable `${CPP_UNIT_TESTS}` in the `cmake/tests.cmake`. -e.g.: -https://github.com/diffCheckOrg/diffCheck/blob/e080a93cdd73d96efb0686f80bf13730e0b8efa3/cmake/tests.cmake#L13-L17 - -## Write Python tests -To write a test, you need to create a new file in the `tests/integration_tests` folder. Write a new `.py` test file and add it in the `cmake/tests.cmake` in the `add_test` function. -e.g.: -https://github.com/diffCheckOrg/diffCheck/blob/e080a93cdd73d96efb0686f80bf13730e0b8efa3/cmake/tests.cmake#L45-L48 \ No newline at end of file diff --git a/README.md b/README.md index 0b9254e8..5ea42c12 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,7 @@ + PyPI - Version

@@ -36,6 +37,7 @@ gantt Data collection and evaluation :dataeval, after fabrob, 4w ``` + ## 3rd party libraries The project uses the following 3rd party libraries: diff --git a/deps/cgal/auxiliary/gdb/python/CGAL/printers.py b/deps/cgal/auxiliary/gdb/python/CGAL/printers.py index c639b9f4..4ed9607e 100644 --- a/deps/cgal/auxiliary/gdb/python/CGAL/printers.py +++ b/deps/cgal/auxiliary/gdb/python/CGAL/printers.py @@ -23,7 +23,7 @@ def lookup_function (val): # Get the unqualified type, stripped of typedefs. type = type.unqualified ().strip_typedefs () - # Get the type name. + # Get the type name. typename = type.tag if typename == None: return None @@ -34,7 +34,7 @@ def lookup_function (val): for function in CGAL_pretty_printers_dict: if function.search (typename): return CGAL_pretty_printers_dict[function] (val) - + # Cannot find a pretty printer. Return None. return None @@ -43,7 +43,7 @@ def lookup_function (val): class CGAL_Handle_for: def __init__(self, val): self.val = val - + def to_string (self): node = self.val['ptr_'].dereference() return 'Handle_for(%s , refcount=%d)' % (node['t'],node['count']) @@ -51,7 +51,7 @@ def to_string (self): class CGAL_Point_2: def __init__(self, val): self.val = val - + def to_string (self): node = self.val; type = self.val.type @@ -64,7 +64,7 @@ def to_string (self): class CGAL_Tdsvb3: def __init__(self, val): self.val = val - + def to_string (self): node = self.val; return 'CGAL::Tvb_3(%s)' % node['_p'] @@ -72,7 +72,7 @@ def to_string (self): class CGAL_Point_3: def __init__(self, val): self.val = val - + def to_string (self): node = self.val; type = self.val.type @@ -86,7 +86,7 @@ class CGAL_Vector_2: def __init__(self, val, name): self.val = val self.name = name - + def to_string (self): node = self.val['base'] return 'CGAL::%s(%s)' % (self.name, node) @@ -94,7 +94,7 @@ def to_string (self): class CGAL_Array: def __init__(self, val): self.val = val - + def to_string (self): node = self.val['_M_instance'] return node @@ -102,7 +102,7 @@ def to_string (self): class CGAL_Boost_tuples: def __init__(self, val): self.val = val - + def to_string (self): return '{%s}' % self.display_head_and_continue(self.val) @@ -135,4 +135,3 @@ def display_head_and_continue(self, val): CGAL_pretty_printers_dict[re.compile('^(std|boost)(::tr1)?(::tuples)?::tuple<.*>')] = lambda val: CGAL_Boost_tuples(val) #p2 = gdb.selected_frame().read_var('p2') - diff --git a/deps/cgal/auxiliary/gdb/test-gdb.py b/deps/cgal/auxiliary/gdb/test-gdb.py index b5a2b077..0ce6dec8 100644 --- a/deps/cgal/auxiliary/gdb/test-gdb.py +++ b/deps/cgal/auxiliary/gdb/test-gdb.py @@ -11,8 +11,5 @@ import sys import os -import gdb sys.path.insert(0, os.getcwd() + '/python') - -import CGAL.printers diff --git a/deps/cgal/cmake/modules/Help/cmake.py b/deps/cgal/cmake/modules/Help/cmake.py index ce321e0b..00d872f1 100644 --- a/deps/cgal/cmake/modules/Help/cmake.py +++ b/deps/cgal/cmake/modules/Help/cmake.py @@ -80,7 +80,7 @@ def run(self): settings.record_dependencies.add(path) f = io.FileInput(source_path=path, encoding=encoding, error_handler=e_handler) - except UnicodeEncodeError as error: + except UnicodeEncodeError: raise self.severe('Problems with "%s" directive path:\n' 'Cannot encode input file path "%s" ' '(wrong locale?).' 
% diff --git a/deps/eigen b/deps/eigen index 0b646f3f..92e373e6 160000 --- a/deps/eigen +++ b/deps/eigen @@ -1 +1 @@ -Subproject commit 0b646f3f36ed93b9baed305fc337177357acd9e9 +Subproject commit 92e373e6f553dd842337c8467f4ddb669e0e9199 diff --git a/deps/googletest b/deps/googletest index c231e6f5..ff233bdd 160000 --- a/deps/googletest +++ b/deps/googletest @@ -1 +1 @@ -Subproject commit c231e6f5b152029dbd5fa4a9e0c04095035aec3f +Subproject commit ff233bdd4cac0a0bf6e5cd45bda3406814cb2796 diff --git a/deps/pybind11 b/deps/pybind11 index 72330728..a1d00916 160000 --- a/deps/pybind11 +++ b/deps/pybind11 @@ -1 +1 @@ -Subproject commit 723307283ed7dc03c901fba1da1127c7b16a4a0d +Subproject commit a1d00916b26b187e583f3bce39cd59c3b0652c32 diff --git a/doc/README.md b/doc/README.md new file mode 100644 index 00000000..12e00425 --- /dev/null +++ b/doc/README.md @@ -0,0 +1,2 @@ +To rebuild the documentation, +see [Update Documentation](https://jax.readthedocs.io/en/latest/developer.html#update-documentation). diff --git a/doc/_autosummary/.gitkeep b/doc/_autosummary/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/doc/_static/correct_install_check.png b/doc/_static/correct_install_check.png new file mode 100644 index 00000000..77116db6 Binary files /dev/null and b/doc/_static/correct_install_check.png differ diff --git a/doc/_static/fron_log.png b/doc/_static/fron_log.png new file mode 100644 index 00000000..4fda9ce4 Binary files /dev/null and b/doc/_static/fron_log.png differ diff --git a/doc/_static/front_bothroundsquare.png b/doc/_static/front_bothroundsquare.png new file mode 100644 index 00000000..40a5f1da Binary files /dev/null and b/doc/_static/front_bothroundsquare.png differ diff --git a/doc/_static/front_square.png b/doc/_static/front_square.png new file mode 100644 index 00000000..ef040860 Binary files /dev/null and b/doc/_static/front_square.png differ diff --git a/doc/_static/gh_components/gh_DFVizSettings_example.png b/doc/_static/gh_components/gh_DFVizSettings_example.png new file mode 100644 index 00000000..5f953a77 Binary files /dev/null and b/doc/_static/gh_components/gh_DFVizSettings_example.png differ diff --git a/doc/_static/logo_favicon.ico b/doc/_static/logo_favicon.ico new file mode 100644 index 00000000..d9726f92 Binary files /dev/null and b/doc/_static/logo_favicon.ico differ diff --git a/doc/_static/logo_sphinx.png b/doc/_static/logo_sphinx.png new file mode 100644 index 00000000..56759dc9 Binary files /dev/null and b/doc/_static/logo_sphinx.png differ diff --git a/doc/_static/style.css b/doc/_static/style.css new file mode 100644 index 00000000..7a5c6470 --- /dev/null +++ b/doc/_static/style.css @@ -0,0 +1,43 @@ +@import url("theme.css"); + +:root { + --block-bg-opacity: .5; +} + +.wy-side-nav-search { + background-color: #fff; +} + +.getting-started { + background-color: rgba(78, 150, 253, var(--block-bg-opacity)); +} + +.user-guides { + background-color: rgba(0, 169, 154, var(--block-bg-opacity)); +} + +.developer-docs { + background-color: rgba(171, 0, 182, var(--block-bg-opacity)); +} + +div.red-background pre { + background-color: rgba(244, 204, 204, var(--block-bg-opacity)); +} + +div.green-background pre { + background-color: rgba(204, 244, 204, var(--block-bg-opacity)); +} + +/* Python code block comments */ +html[data-theme="light"] .highlight span.c1 { + color: #fa8d59; +} + +/* Python code traceback and exception */ +html[data-theme="light"] .highlight span.gt { + color: #ff0000; +} + +html[data-theme="light"] .highlight span.gr { + color: 
#ff0000; +} diff --git a/doc/_templates/layout.html b/doc/_templates/layout.html new file mode 100644 index 00000000..4c57ba83 --- /dev/null +++ b/doc/_templates/layout.html @@ -0,0 +1,2 @@ +{% extends "!layout.html" %} +{% set css_files = css_files + ["_static/style.css"] %} diff --git a/doc/assembly_intro.md b/doc/assembly_intro.md new file mode 100644 index 00000000..236bcee5 --- /dev/null +++ b/doc/assembly_intro.md @@ -0,0 +1,4 @@ +(assembly_intro)= +# DfAssembly + +/// introductio nand example on how to build an assembly with diffCheck (showing also deconstructing and a general scheme of the assembly architecture) \ No newline at end of file diff --git a/doc/change_log.md b/doc/change_log.md new file mode 100644 index 00000000..b1fc1af4 --- /dev/null +++ b/doc/change_log.md @@ -0,0 +1,4 @@ +(change_log)= +# Change log + +/// find a way to automatize the change log \ No newline at end of file diff --git a/doc/compute_error_intro.md b/doc/compute_error_intro.md new file mode 100644 index 00000000..f886abca --- /dev/null +++ b/doc/compute_error_intro.md @@ -0,0 +1,5 @@ +(compute_error_intro)= +# Compute cloud-CAD error + +/// tutorial on how to compute the error between a point cloud and a CAD model with diffCheck component. It can be slit in two parts: single element - full structure + diff --git a/doc/conf.py b/doc/conf.py new file mode 100644 index 00000000..6502b15d --- /dev/null +++ b/doc/conf.py @@ -0,0 +1,342 @@ +# ----------------------------------------------------------------------------- +# Modified version of the JAX documentation configuration file for the DiffCheck project. +# Modified by: The DiffCheck authors +# Copyright 2024, The DiffCheck Authors. +# +# The original copyright notice is included below. +# Copyright 2018 The JAX Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
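+# Note on the path setup below (an editorial sketch of how the pieces fit
+# together, based on the CMakeLists.txt change in this same patch): the CMake
+# target copies the compiled diffcheck_bindings .pyd and its DLLs into doc/
+# (SPHINX_DOC_DIR), so this file only has to register doc/ as a DLL directory
+# and put doc/ and src/gh/diffCheck on sys.path before importing diffCheck.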
+# +import inspect +import operator +import os +import sys + +# -- Dlls/pyd imports --------------------------------------------------------- +# import package's modules path and dlls/pyds, checking for import of pybind module +extra_dll_dir_doc = os.path.abspath('./') +extra_dll_dir_pysource = os.path.abspath('./../src/gh/diffCheck') + +os.add_dll_directory(extra_dll_dir_doc) # For finding DLL dependencies on Windows +sys.path.insert(0, extra_dll_dir_doc) +sys.path.insert(0, extra_dll_dir_pysource) +try: + # check general pacakae import + import diffCheck + print(f"diffCheck's version: {diffCheck.__version__}") + + # check bindings + import diffCheck.diffcheck_bindings as dfb +except ImportError as e: + print(f"Failed to import diffcheck_bindings: {e}") + print("Current sys.path directories:") + for path in sys.path: + print(path) + print("Current files in the directory:") + for file in os.listdir(extra_dll_dir_doc): + print(file) + sys.exit(1) + +# -- Project information ----------------------------------------------------- + +project = 'DiffCheck' +copyright = '2024, The DiffCheck Authors. We use ReadTheDocs, SPhinx, SphinxBookTheme and Jax theme-flavoured which are copyright their respective authors.' +author = 'The DiffCheck authors' + +# The short X.Y version +version = '0.1' +# The full version, including alpha/beta/rc tags +release = '' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +needs_sphinx = '2.1' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.linkcode', + 'sphinx.ext.mathjax', + 'sphinx.ext.napoleon', + 'myst_nb', + "sphinx_remove_toctrees", + 'sphinx_copybutton', + 'sphinx_design', + 'sphinxcontrib.mermaid', + 'sphinx_autodoc_typehints', + + 'sphinx_ghcomponent_parser', # custom extension +] + +intersphinx_mapping = { + 'python': ('https://docs.python.org/3/', None), + 'numpy': ('https://numpy.org/doc/stable/', None), + 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None), +} + +suppress_warnings = [ + 'ref.citation', # Many duplicated citations in numpy/scipy docstrings. + 'ref.footnote', # Many unreferenced footnotes in numpy/scipy docstrings + 'myst.header', + # TODO(jakevdp): remove this suppression once issue is fixed. + 'misc.highlighting_failure', # https://github.com/ipython/ipython/issues/14142 +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +source_suffix = ['.rst', '.md'] + +# The main toctree document. +main_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [ + # Sometimes sphinx reads its own outputs as inputs! 
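+    # (editorial note) the active entries below keep previously built output
+    # and this folder's README.md from being picked up as sources; the
+    # commented-out entries are leftovers of the upstream JAX configuration
+    # this file was adapted from.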
+ 'build/html', + # 'build/jupyter_execute', + # 'notebooks/README.md', + 'README.md', + # Ignore markdown source for notebooks; myst-nb builds from the ipynb + # These are kept in sync using the jupytext pre-commit hook. + # 'notebooks/*.md', + # 'pallas/quickstart.md', + # 'pallas/tpu/pipelining.md', + # 'pallas/tpu/matmul.md', + # 'jep/9407-type-promotion.md', + # 'autodidax.md', + # 'sharded-computation.md', + # 'ffi.ipynb', +] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = None + + +autosummary_generate = True +napolean_use_rtype = False + + +# -- Mocking ----------------------------------------------------------------- +autodoc_mock_imports = [ + # Rhino/gh specifics + "Rhino", + "rhinoscriptsyntax", + "scriptcontext", + "Grasshopper", + + # Windows specific + "System", + ] + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_book_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + 'show_toc_level': 2, + 'repository_url': 'https://github.com/diffCheckOrg/diffCheck', + 'use_repository_button': True, # add a "link to repository" button + 'navigation_with_keys': False, +} + +# Add any paths that contain custom +# static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +html_logo = '_static/logo_sphinx.png' +html_favicon = '_static/logo_favicon.ico' + +html_css_files = [ + 'style.css', +] + + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + +# -- Options for myst ---------------------------------------------- +myst_heading_anchors = 3 # auto-generate 3 levels of heading anchors +myst_enable_extensions = ['dollarmath'] +nb_execution_mode = "force" +nb_execution_allow_errors = False +nb_merge_streams = True +nb_execution_show_tb = True + +# Notebook cell execution timeout; defaults to 30. +nb_execution_timeout = 100 + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'DFdoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). 
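+# The `main_doc` entry point defined above ('index') is reused as the start
+# file for the LaTeX, man-page and Texinfo builders configured below.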
+latex_documents = [ + (main_doc, 'DF.tex', 'DF Documentation', + 'The DF authors', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (main_doc, 'df', 'DF Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (main_doc, 'DF', 'DF Documentation', + author, 'DF', 'One line description of project.', + 'Miscellaneous'), +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + + +# -- Extension configuration ------------------------------------------------- + +# Tell sphinx autodoc how to render type aliases. +autodoc_typehints = "description" +autodoc_typehints_description_target = "all" +autodoc_type_aliases = { + 'ArrayLike': 'jax.typing.ArrayLike', + 'DTypeLike': 'jax.typing.DTypeLike', +} + +# Remove auto-generated API docs from sidebars. They take too long to build. +remove_from_toctrees = ["_autosummary/*"] + +# Customize code links via sphinx.ext.linkcode + +def linkcode_resolve(domain, info): + import diffCheck + + if domain != 'py': + return None + if not info['module']: + return None + if not info['fullname']: + return None + if info['module'].split(".")[0] != 'df': + return None + try: + mod = sys.modules.get(info['module']) + obj = operator.attrgetter(info['fullname'])(mod) + if isinstance(obj, property): + obj = obj.fget + while hasattr(obj, '__wrapped__'): # decorated functions + obj = obj.__wrapped__ + filename = inspect.getsourcefile(obj) + source, linenum = inspect.getsourcelines(obj) + except: + return None + filename = os.path.relpath(filename, start=os.path.dirname(diffCheck.__file__)) + lines = f"#L{linenum}-L{linenum + len(source)}" if linenum else "" + return f"https://github.com/diffCheckOrg/diffCheck/blob/main/diffCheck/{filename}{lines}" diff --git a/doc/contribute.md b/doc/contribute.md new file mode 100644 index 00000000..11c41e63 --- /dev/null +++ b/doc/contribute.md @@ -0,0 +1,52 @@ +(contrib_guide)= +# Contribute + +We welcome pull requests from everyone. Please have a look at the [issue](https://github.com/diffCheckOrg/diffCheck/issues) list to see if there is something you can help with. If you have a new feature in mind, please open an issue to discuss it first. + +## Code quality + +We run [mypy](https://mypy.readthedocs.io/en/stable/index.html) and [Ruff](https://docs.astral.sh/ruff/) for e.g. python on pre-commit hooks to ensure code quality. +Please make sure to: +1. (when you `git clone` the repo) to install the *pre-commit hooks*: + + ```console + pre-commit install + ``` +2. to run the following commands before submitting a pull request: + + ```console + pre-commit run --all-files + ``` + +## How to contribute + +Follow these steps to contribute to the project: + +1. 
Fork the diffCheck repository by clicking the **Fork** button on the [diffCheck repository](https://github.com/diffCheckOrg/diffCheck). Clone the repository to your local machine: + + ```console + git clone https://github.com/YOUR_USERNAME/diffCheck.git + cd diffCheck + ``` + +2. Create a new branch for your feature: + + ```console + git checkout -b my-feature + ``` + +3. Add the diffCheck repository as a remote for convinience: + + ```console + git remote add upstream https://github.com/diffCheckOrg/diffCheck + ``` + +4. Next you will need to set up your development environment. You can find the instructions in the [development installation guide](dev_env). + +5. Work on your feature (follow [c++](cpp_conv) or [py](py_conv) style guide) and commit your changes by following the [commit message guidelines](git_commit_system): + + ```console + git add . + git commit -m "WIP: Add my feature" + git push origin my-feature + ``` \ No newline at end of file diff --git a/doc/dev_documentation.rst b/doc/dev_documentation.rst new file mode 100644 index 00000000..4603eeee --- /dev/null +++ b/doc/dev_documentation.rst @@ -0,0 +1,16 @@ +.. _dev_df_doc: + +Developer documentation +======================= + +Here you will find all the necessary as a developer to contribute to the project. + +.. toctree:: + :maxdepth: 1 + :caption: Developer documentation + + contribute + development_env + style + testing + documentation \ No newline at end of file diff --git a/doc/development_env.md b/doc/development_env.md new file mode 100644 index 00000000..2e90085f --- /dev/null +++ b/doc/development_env.md @@ -0,0 +1,133 @@ +(dev_env)= +# Development environment + +If you develop for DF, you need to set up your development environment. This guide will help you to do that. Wether you are developing for the `c++` or `python` part of the project, you will find the necessary information here. + +## Prepare your environment + +Before to start, especially if you used diffCheck as an end-user before you will need to: + +1. Make sure to have `camke` installed on your machine. You can download it [here](https://cmake.org/download/). +2. Make sure to have `git` installed on your machine. You can download it [here](https://git-scm.com/downloads). +3. We recommend to use `Visual Studio Code` as an IDE. You can download it [here](https://code.visualstudio.com/) together with [script-sync](https://github.com/ibois-epfl/script-sync) extension for Rhino/Grasshopper. You can download it from the Rhino's `PackageManager`. It is particularly useful if you want to develop [GHComponents](gh_components_gd) in python. +4. if you used diffCheck before as an end-user clean all the `diffCheck folders` in the following directory (the last name will change), beware that Rhino should be closed before this step: + ```console + C:\Users\\.rhinocode\py39-rh8\site-envs\default-wMh5LZL3 + ``` + + ```{important} + if you drop an official released diffCheck component from yak, this one will have the `#r : diffCheck==` notation at the top of the script. Get rid of all these release components before to start and be sur to erase again the previous folders (they recreated each time `#r : diffCheck` is called). + ``` + +5. Clone the repository on your machine. Open a terminal and run the following command: + ```console + git clone https://github.com/diffCheckOrg/diffCheck + ``` + +6. Checkout the repo: + ```console + cd diffCheck + ``` + +7. 
Run cmake utilities `.bat`s files to config and build: + ```console + ./cmake/config.bat + ./cmake/build.bat + ``` + +8. Build the python df package from the py source code's directory: + ```console + cd src/gh/diffCheck + python setup.py sdist bdist_wheel + ``` + +9. Last, install the pip pacakge from the repository in editable mode. This way, all the modifications made to the source code of the repository will be reflected in the installed package. Open a terminal and run the following command (replace the path with where you download the repository): + ```console + C:\Users\\.rhinocode\py39-rh8\python.exe -m pip install -e "\src\gh\diffCheck" + ``` + + ```{note} + For your info the packages is installed in `C:\Users\andre\.rhinocode\py39-rh8\Lib\site-packages`. + ``` + +That's it you are now a contributor to the diffCheck! We raccomand to not download anymore from yak package but rather use the source code in the repository. If you want the latest diffCheck, checkout and pull the main. + +--- + +(c-df-build)= +## C++ DF build +We mainly code in C++ to have heavy-lifting operations accessible via a [pybind11 interface](../src/diffCheckBindings.cc). If you or someone else has modified one of two follow these steps: + +1. Checkout the repository: + ```console + cd diffCheck + ``` +2. Run cmake utilities `.bat`s files to config and build: + ```console + ./cmake/config.bat + ./cmake/build.bat + ``` +3. All the C++'s targets should be now built. + +--- + +## Python DF build +There are 3 ways to develop in python in DF, often you will do both at the same time: +* Develop `GHComponents` in python +* Develop the `pybind11` interface in c++ +* Develop the Python's `diffCheck` API + +(gh_components_gd)= +### a) Develop `GHComponents` in API +We follow the Compas's method to generate [DF python components](gh_dfcomp) in Grasshopper, have a look at their [guide](https://github.com/compas-dev/compas-actions.ghpython_components). All the components are in `src/gh/components`. To develop a new component, you will need to follow these steps: + +1. Create a new folder in `src/gh/components` with the name of your component. +2. Create 3 files with the following names in it: + * a) `code.py`: this is where you code goes + * b) `icon.png`: this is the icon of your component + * c) `metadata.json`: this is the metadata of your component (for more info follow [Compas guidelines](https://github.com/compas-dev/compas-actions.ghpython_components?tab=readme-ov-file#metadata)) + + your `code.py` should look like this: + + ```{eval-rst} + .. literalinclude:: ../src/gh/components/DF_tester/code.py + :language: python + :linenos: + :caption: `DF_tester component <../src/gh/components/DF_tester/code.py>`_ + ``` +3. To test it in Grasshopper, drop a new `script-sync` component in Grasshopper, point it to the `code.py` file and add `diffCheck` to the packages to reload of the component. +4. Finally, you will need to add the following on your last line of the `code.py` file: + ```python + if __name__ == "__main__": + comp = DF_tester() + o_value : bool = comp.Run() + ``` + + ```{warning} + This is necessary to run the component in the Rhino's python editor but it should be **removed** when done. + ``` +5. Once you are satisfied you can componentize it by running: + ```console + invoke ghcomponentize + ``` + This will generate the component in the `build/gh` folder. Grab yours and drop it on the Grasshopper canvas, be sure that this is working as expected. 
+ + ```{hint} + If you pull a new version of the source code with new components you will need to run this command to update the generate the components, erase the old ones in the `ghuser` folder and add the new ones. + ``` +6. Done! You have now a new component in the `ghuser` tab of Grasshopper. + +(pybind_gd)= +### b) Develop the `pybind11` interface in c++ + +Have a look at [C++ DF build](#c-df-build) to build the c++ project. The `pybind11` interface is in the `src/diffCheckBindings.cc` file. Write your new functions or namespace in this file. This is basically a `.dll` so for Rhino/Grasshopper to be visible you need to first close Rhino and run `cmake/build.bat` to build the project. Once done, you can open Rhino and test your new wrap functions in the Rhino's python editor. + + +(dfpypack_gd)= +### c) Develop the Python's `diffCheck` API + +All the source code is in the `src/gh/diffCheck/diffCheck` folder. If you add new modules or code to existing one and you are using `script-sync` to test your code in Grasshopper, your changes will be immediately reflected. Have a look at the [diffCheck Python API](diffCheck_API) for more info. + +```{note} +If you want to test your code in the Rhino's python editor, you will need to install the package in editable mode. Have a look at the [Prepare your environment](#prepare-your-environment) section for more info. +``` \ No newline at end of file diff --git a/doc/df_architecture.md b/doc/df_architecture.md new file mode 100644 index 00000000..89e5c06d --- /dev/null +++ b/doc/df_architecture.md @@ -0,0 +1,4 @@ +(df_architecture_guide)= +# diffCheck architecture + +/// more info and details about how the diffCheck architecture works as a plugin \ No newline at end of file diff --git a/doc/diffCheck.df-b_geometries.rst b/doc/diffCheck.df-b_geometries.rst new file mode 100644 index 00000000..ebd6fecb --- /dev/null +++ b/doc/diffCheck.df-b_geometries.rst @@ -0,0 +1,62 @@ +.. _diffCheck-df-b_geometries: + +``diffCheck.df-b_geometries`` modules +===================================== + +This page contains 2 modules for the geometries: + +1. :mod:`diffCheck.df_geometries` module contains all the objects and functions to handle geometries of a structure in the DiffCheck library. +2. :mod:`diffCheck.diffcheck_bindings.dfb_geometry` module contains all the objects and functions to handle mesh and point cloud geometries in diffCheck. + + +.. _diffCheck-df_geometries: + +``diffCheck.df_geometries`` module +================================== +.. currentmodule:: diffCheck.df_geometries + +This module represent the geometry of a structure in the DiffCheck library. It contains the following classes: + +- :class:`diffCheck.df_geometries.DFAssembly` +- :class:`diffCheck.df_geometries.DFBeam` +- :class:`diffCheck.df_geometries.DFJoint` +- :class:`diffCheck.df_geometries.DFFace` +- :class:`diffCheck.df_geometries.DFVertex` + +This is how these geometris are related: + +.. mermaid:: + + stateDiagram-v2 + DFAssembly + state DFAssembly { + + [*] --> DFBeam + state DFBeam { + + [*] --> DFJoint + DFJoint --> DFFace + + state DFFace { + [*] --> DFVertex + } + } + } + +.. caution:: + + The :class:`diffCheck.df_geometries.DFJoint` is only generated when accessed from the :class:`diffCheck.df_geometries.DFAssembly` or :class:`diffCheck.df_geometries.DFBeam` objects. It exists only as a convinience container for the joints. + +.. 
automodule:: diffCheck.df_geometries + :members: + :undoc-members: + :show-inheritance: + +``diffCheck.diffcheck_bindings.dfb_geometry`` module +==================================================== +.. currentmodule:: diffCheck.diffcheck_bindings.dfb_geometry + +.. automodule:: diffCheck.diffcheck_bindings.dfb_geometry + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/doc/diffCheck.df_cvt_bindings.rst b/doc/diffCheck.df_cvt_bindings.rst new file mode 100644 index 00000000..05c4dfe4 --- /dev/null +++ b/doc/diffCheck.df_cvt_bindings.rst @@ -0,0 +1,11 @@ +``diffCheck.df_cvt_bindings`` module +==================================== + +.. currentmodule:: diffCheck.df_cvt_bindings + +This module contains all the objects and functions to handle the conversion between, diffCheck (in particular :mod:`diffCheck.diffcheck_bindings`) to/from Rhino. + +.. automodule:: diffCheck.df_cvt_bindings + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/doc/diffCheck.df_error_estimation.rst b/doc/diffCheck.df_error_estimation.rst new file mode 100644 index 00000000..403140f2 --- /dev/null +++ b/doc/diffCheck.df_error_estimation.rst @@ -0,0 +1,9 @@ +``diffCheck.df_error_estimation`` module +======================================== + +.. currentmodule:: diffCheck.df_error_estimation + +.. automodule:: diffCheck.df_error_estimation + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/doc/diffCheck.df_joint_detector.rst b/doc/diffCheck.df_joint_detector.rst new file mode 100644 index 00000000..11d58aff --- /dev/null +++ b/doc/diffCheck.df_joint_detector.rst @@ -0,0 +1,11 @@ +``diffCheck.df_joint_detector`` module +====================================== + +.. currentmodule:: diffCheck.df_joint_detector + +The joint detector is detecting all the joints, transforming in :class:`diffCheck.df_geometries.DFBeams` and then assembling them in :class:`diffCheck.df_geometries.DFAssembly`. + +.. automodule:: diffCheck.df_joint_detector + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/doc/diffCheck.df_transformations.rst b/doc/diffCheck.df_transformations.rst new file mode 100644 index 00000000..78a76a3c --- /dev/null +++ b/doc/diffCheck.df_transformations.rst @@ -0,0 +1,13 @@ +``diffCheck.df_transformations`` module +======================================= + +.. currentmodule:: diffCheck.df_transformations + +.. automodule:: diffCheck.df_transformations + :members: + :undoc-members: + :show-inheritance: + +.. note:: + + In DF wee also have a wrap object representing a numpy-like `4DMatrix` transformation responsible for transforming :class:`diffCheck.diffcheck_bindings.DFCloud` and :class:`diffCheck.diffcheck_bindings.DFMesh`. For convertions see the :mod:`diffCheck.df_cvt_bindings` module. \ No newline at end of file diff --git a/doc/diffCheck.df_util.rst b/doc/diffCheck.df_util.rst new file mode 100644 index 00000000..592adf51 --- /dev/null +++ b/doc/diffCheck.df_util.rst @@ -0,0 +1,9 @@ +``diffCheck.df_util`` module +============================ + +.. currentmodule:: diffCheck.df_util + +.. 
automodule:: diffCheck.df_util + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/doc/diffCheck.df_visualization.rst b/doc/diffCheck.df_visualization.rst new file mode 100644 index 00000000..7d741c6b --- /dev/null +++ b/doc/diffCheck.df_visualization.rst @@ -0,0 +1,9 @@ +``diffCheck.df_visualization`` module +===================================== + +.. currentmodule:: diffCheck.df_visualization + +.. automodule:: diffCheck.df_visualization + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/doc/diffCheck.dfb_registrations.rst b/doc/diffCheck.dfb_registrations.rst new file mode 100644 index 00000000..b83ccb7a --- /dev/null +++ b/doc/diffCheck.dfb_registrations.rst @@ -0,0 +1,9 @@ +``diffCheck.dfb_registrations`` module +====================================== + +.. currentmodule:: diffCheck.diffcheck_bindings.dfb_registrations + +.. automodule:: diffCheck.diffcheck_bindings.dfb_registrations + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/doc/diffCheck.dfb_segmentation.rst b/doc/diffCheck.dfb_segmentation.rst new file mode 100644 index 00000000..fd11f048 --- /dev/null +++ b/doc/diffCheck.dfb_segmentation.rst @@ -0,0 +1,9 @@ +``diffCheck.dfb_segmentation`` module +===================================== + +.. currentmodule:: diffCheck.diffcheck_bindings.dfb_segmentation + +.. automodule:: diffCheck.diffcheck_bindings.dfb_segmentation + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/doc/diffCheck_PythonAPI.rst b/doc/diffCheck_PythonAPI.rst new file mode 100644 index 00000000..5f752690 --- /dev/null +++ b/doc/diffCheck_PythonAPI.rst @@ -0,0 +1,50 @@ +.. currentmodule:: diffCheck + +.. _diffCheck_API: + +diffCheck Python API +==================== + +DF's Python API is composed by two main source code directories: + +- ``diffCheck`` +- ``diffCheck.diffcheck_bindings`` + +The ``diffCheck`` directory contains the Python API for the DiffCheck's Python API tightly binded to ``RhinoPython``'s modules (``Rhino``, ``rhinoscriptsynthax``, ``scriptcontext`` and ``grasshopper``), while the ``diffCheck.diffcheck_bindings`` directory contains the bindings to the DF's C++ API. In DF we use the ``pybind11`` library to generate the bindings between the C++ and Python APIs and it is reserved to heaevy computational tasks and objects like point clouds, registrations, segmentations, etc. + +.. mermaid:: + + %%{init: {'theme': 'forest'}}%% + stateDiagram-v2 + direction LR + diffCheck_C++API --> diffcheck_bindings(pybind11) + diffcheck_bindings(pybind11) --> RhinoPythonAPI + + state DF_C++API + + state diffcheck_bindings(pybind11) + + state RhinoPythonAPI { + diffCheck_PythonAPI + } + +.. warning:: + + The current DF' Python API is not meant to be used as a standalone library. It is meant to be used always in conjuction with `Rhino` and `Grasshopper` ecosystems. + +Submodules +---------- + +.. toctree:: + :maxdepth: 1 + + diffCheck.df-b_geometries + diffCheck.df_joint_detector + diffCheck.df_error_estimation + diffCheck.df_visualization + diffCheck.df_cvt_bindings + diffCheck.df_transformations + diffCheck.df_util + + diffCheck.dfb_registrations + diffCheck.dfb_segmentation \ No newline at end of file diff --git a/doc/documentation.md b/doc/documentation.md new file mode 100644 index 00000000..0ad63e16 --- /dev/null +++ b/doc/documentation.md @@ -0,0 +1,72 @@ +(doc_guide)= +# Documentation + +In DF we use `Sphinx` to generate the documentation. 
+Submodules +---------- + +.. toctree:: + :maxdepth: 1 + + diffCheck.df-b_geometries + diffCheck.df_joint_detector + diffCheck.df_error_estimation + diffCheck.df_visualization + diffCheck.df_cvt_bindings + diffCheck.df_transformations + diffCheck.df_util + + diffCheck.dfb_registrations + diffCheck.dfb_segmentation \ No newline at end of file diff --git a/doc/documentation.md b/doc/documentation.md new file mode 100644 index 00000000..0ad63e16 --- /dev/null +++ b/doc/documentation.md @@ -0,0 +1,72 @@ +(doc_guide)= +# Documentation + +In DF we use `Sphinx` to generate the documentation. The documentation is written in `reStructuredText` and `Markdown` and the source files are located in the `doc` folder. The documentation is hosted on `ReadTheDocs` and is automatically updated when a new commit is pushed to the `main` branch. + +```{note} + For more info on how to write `.rst` files, check this [reStructuredText](https://canonical-documentation-with-sphinx-and-readthedocscom.readthedocs-hosted.com/style-guide/) guide. +``` + +## Build locally + +To build the documentation locally and test your changes: +```console +invoke documentize +``` +and to open the documentation in your browser: +```console +start _build/html/index.html +``` +If you modify the `doc` files and refresh the page, the updates will be visible. + +## Contribute to the documentation + +Follow these guides to contribute to the documentation whether you: + +- add a new [GHComponents](ghcomp_doc_g) +- add/modify the [Python API](pyapi_doc_g) +- add a new [tutorial](tutorial_doc_g) + +--- +(ghcomp_doc_g)= +### ✔️ `GHComponent`'s docs + +If you write a new [GHComponent](gh_components.rst) you will most probably already have created and filled a `metadata.json` file. DF uses this file to automatically generate the documentation for the GHComponents. The only thing you need to do is: +* add a new `.rst` file with the name of the component like `gh_DFComponentName.rst` (a minimal stub is sketched at the end of this section) +* add it to the `gh_components.rst` file's `list-table::` + ```{attention} + The `list-table::` is organized in two columns, so if the number of components is odd simply add two empty entries `-` at the end of the last row. + ``` +* add it to the `toctree::` + +Our custom sphinx extension `sphinx_ghcomponent_parser` will automatically parse the `metadata.json` file and generate the documentation for the GHComponent✨✨. +
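+For reference, a stub for a hypothetical `DFComponentName` component follows the same pattern as the existing `gh_*.rst` files in this folder:
+
+```rst
+.. image:: ../src/gh/components/DF_component_name/icon.png
+   :align: left
+   :width: 40px
+
+``DFComponentName`` component
+=============================
+
+.. ghcomponent_to_rst:: ../src/gh/components/DF_component_name
+```
+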
+ +(pyapi_doc_g)= +### ☑️ `Python API`'s docs + +For [Python API documentation](diffCheck_PythonAPI), we use `sphinx-apidoc` to automatically generate the API documentation, so the only thing to do is to add beautiful docstrings to the Python code with the following reStructuredText (reST) format style: + +```python + def example_function(param1, param2): + """ + Summary of the function. + + :param param1: Description of `param1`. + :type param1: int + :param param2: Description of `param2`. + :type param2: str + :return: Description of the return value. + :rtype: bool + """ + return True +``` + +(tutorial_doc_g)= +### ✅ `DF Tutorial`'s docs + +If you need to add a new page to the [tutorials](tutorials.rst) (e.g. a [new tutorial](tutorials.rst)), you can do so by adding a new `.rst` file in the `doc` folder and linking it in the `tutorials.rst` file's toctree: + +```{eval-rst} +.. literalinclude:: tutorials.rst + :language: rst + :lines: 6-14 +``` \ No newline at end of file diff --git a/doc/gh_DFBuildAssembly.rst b/doc/gh_DFBuildAssembly.rst new file mode 100644 index 00000000..1123475b --- /dev/null +++ b/doc/gh_DFBuildAssembly.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_build_assembly/icon.png + :align: left + :width: 40px + +``DFBuildAssembly`` component +============================= + +.. ghcomponent_to_rst:: ../src/gh/components/DF_build_assembly \ No newline at end of file diff --git a/doc/gh_DFCADSegmentator.rst b/doc/gh_DFCADSegmentator.rst new file mode 100644 index 00000000..6c955585 --- /dev/null +++ b/doc/gh_DFCADSegmentator.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_CAD_segmentator/icon.png + :align: left + :width: 40px + +``DFCADSegmentator`` component +============================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_CAD_segmentator \ No newline at end of file diff --git a/doc/gh_DFCloudCloudDistance.rst b/doc/gh_DFCloudCloudDistance.rst new file mode 100644 index 00000000..696313b3 --- /dev/null +++ b/doc/gh_DFCloudCloudDistance.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_cloud_cloud_distance/icon.png + :align: left + :width: 40px + +``DFCloudCloudDistance`` component +================================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_cloud_cloud_distance \ No newline at end of file diff --git a/doc/gh_DFCloudMeshDistance.rst b/doc/gh_DFCloudMeshDistance.rst new file mode 100644 index 00000000..57c6ae20 --- /dev/null +++ b/doc/gh_DFCloudMeshDistance.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_cloud_mesh_distance/icon.png + :align: left + :width: 40px + +``DFCloudMeshDistance`` component +================================= + +.. ghcomponent_to_rst:: ../src/gh/components/DF_cloud_mesh_distance \ No newline at end of file diff --git a/doc/gh_DFCloudNormalEstimator.rst b/doc/gh_DFCloudNormalEstimator.rst new file mode 100644 index 00000000..3ef3b1e4 --- /dev/null +++ b/doc/gh_DFCloudNormalEstimator.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_cloud_normal_estimator/icon.png + :align: left + :width: 40px + +``DFCloudNormalEstimator`` component +==================================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_cloud_normal_estimator \ No newline at end of file diff --git a/doc/gh_DFCloudNormalSegmentator.rst b/doc/gh_DFCloudNormalSegmentator.rst new file mode 100644 index 00000000..1fd5e273 --- /dev/null +++ b/doc/gh_DFCloudNormalSegmentator.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_normal_segmentator/icon.png + :align: left + :width: 40px + +``DFCloudNormalSegmentator`` component +====================================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_normal_segmentator \ No newline at end of file diff --git a/doc/gh_DFCloudSizeDownsample.rst b/doc/gh_DFCloudSizeDownsample.rst new file mode 100644 index 00000000..7b4d14f4 --- /dev/null +++ b/doc/gh_DFCloudSizeDownsample.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_cloud_size_downsample/icon.png + :align: left + :width: 40px + +``DFCloudSizeDownsample`` component +=================================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_cloud_size_downsample \ No newline at end of file diff --git a/doc/gh_DFCloudUniformDownsample.rst b/doc/gh_DFCloudUniformDownsample.rst new file mode 100644 index 00000000..ba98e361 --- /dev/null +++ b/doc/gh_DFCloudUniformDownsample.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_cloud_uniform_downsample/icon.png + :align: left + :width: 40px + +``DFCloudUniformDownsample`` component +====================================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_cloud_uniform_downsample \ No newline at end of file diff --git a/doc/gh_DFCloudVoxelDownsample.rst b/doc/gh_DFCloudVoxelDownsample.rst new file mode 100644 index 00000000..c525dc9a --- /dev/null +++ b/doc/gh_DFCloudVoxelDownsample.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_cloud_voxel_downsample/icon.png + :align: left + :width: 40px + +``DFCloudVoxelDownsample`` component +==================================== + +.. 
ghcomponent_to_rst:: ../src/gh/components/DF_cloud_voxel_downsample \ No newline at end of file diff --git a/doc/gh_DFCsvExporter.rst b/doc/gh_DFCsvExporter.rst new file mode 100644 index 00000000..fd357411 --- /dev/null +++ b/doc/gh_DFCsvExporter.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_csv_exporter/icon.png + :align: left + :width: 40px + +``DFCsvExporter`` component +=========================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_csv_exporter \ No newline at end of file diff --git a/doc/gh_DFDeconstructAssembly.rst b/doc/gh_DFDeconstructAssembly.rst new file mode 100644 index 00000000..da1449ff --- /dev/null +++ b/doc/gh_DFDeconstructAssembly.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_deconstruct_assembly/icon.png + :align: left + :width: 40px + +``DFDeconstructAssembly`` component +=================================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_deconstruct_assembly \ No newline at end of file diff --git a/doc/gh_DFDeconstructBeam.rst b/doc/gh_DFDeconstructBeam.rst new file mode 100644 index 00000000..2ad86913 --- /dev/null +++ b/doc/gh_DFDeconstructBeam.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_deconstruct_beam/icon.png + :align: left + :width: 40px + +``DFDeconstructBeam`` component +=============================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_deconstruct_beam \ No newline at end of file diff --git a/doc/gh_DFFastGlobalRegistration.rst b/doc/gh_DFFastGlobalRegistration.rst new file mode 100644 index 00000000..453af1b9 --- /dev/null +++ b/doc/gh_DFFastGlobalRegistration.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_fast_global_registration/icon.png + :align: left + :width: 40px + +``DFFastGlobalRegistration`` component +====================================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_fast_global_registration \ No newline at end of file diff --git a/doc/gh_DFICPRegistration.rst b/doc/gh_DFICPRegistration.rst new file mode 100644 index 00000000..07673c85 --- /dev/null +++ b/doc/gh_DFICPRegistration.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_icp_registration/icon.png + :align: left + :width: 40px + +``DFICPRegistration`` component +=============================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_icp_registration \ No newline at end of file diff --git a/doc/gh_DFJointSegmentator.rst b/doc/gh_DFJointSegmentator.rst new file mode 100644 index 00000000..cbd0a797 --- /dev/null +++ b/doc/gh_DFJointSegmentator.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_joint_segmentator/icon.png + :align: left + :width: 40px + +``DFJointSegmentator`` component +================================ + +.. ghcomponent_to_rst:: ../src/gh/components/DF_joint_segmentator \ No newline at end of file diff --git a/doc/gh_DFLoadCloudFromFile.rst b/doc/gh_DFLoadCloudFromFile.rst new file mode 100644 index 00000000..e7b158fa --- /dev/null +++ b/doc/gh_DFLoadCloudFromFile.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_load_cloud_from_file/icon.png + :align: left + :width: 40px + +``DFLoadCloudFromFile`` component +================================= + +.. ghcomponent_to_rst:: ../src/gh/components/DF_load_cloud_from_file \ No newline at end of file diff --git a/doc/gh_DFLoadMeshFromFile.rst b/doc/gh_DFLoadMeshFromFile.rst new file mode 100644 index 00000000..72ec5c83 --- /dev/null +++ b/doc/gh_DFLoadMeshFromFile.rst @@ -0,0 +1,8 @@ +.. 
image:: ../src/gh/components/DF_load_mesh_from_file/icon.png + :align: left + :width: 40px + +``DFLoadMeshFromFile`` component +================================ + +.. ghcomponent_to_rst:: ../src/gh/components/DF_load_mesh_from_file \ No newline at end of file diff --git a/doc/gh_DFMeshToCloud.rst b/doc/gh_DFMeshToCloud.rst new file mode 100644 index 00000000..870ead93 --- /dev/null +++ b/doc/gh_DFMeshToCloud.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_mesh_to_cloud/icon.png + :align: left + :width: 40px + +``DFMeshToCloud`` component +=========================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_mesh_to_cloud \ No newline at end of file diff --git a/doc/gh_DFRANSACGlobalRegistration.rst b/doc/gh_DFRANSACGlobalRegistration.rst new file mode 100644 index 00000000..70d64963 --- /dev/null +++ b/doc/gh_DFRANSACGlobalRegistration.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_ransac_global_registration/icon.png + :align: left + :width: 40px + +``DFRANSACGlobalRegistration`` component +======================================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_ransac_global_registration \ No newline at end of file diff --git a/doc/gh_DFTester.rst b/doc/gh_DFTester.rst new file mode 100644 index 00000000..ca0ae10a --- /dev/null +++ b/doc/gh_DFTester.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_tester/icon.png + :align: left + :width: 40px + +``DFTester`` component +====================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_tester \ No newline at end of file diff --git a/doc/gh_DFVisualization.rst b/doc/gh_DFVisualization.rst new file mode 100644 index 00000000..e1d80707 --- /dev/null +++ b/doc/gh_DFVisualization.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_visualization/icon.png + :align: left + :width: 40px + +``DFVisualization`` component +============================= + +.. ghcomponent_to_rst:: ../src/gh/components/DF_visualization \ No newline at end of file diff --git a/doc/gh_DFVisualizationSettings.rst b/doc/gh_DFVisualizationSettings.rst new file mode 100644 index 00000000..8d403cb7 --- /dev/null +++ b/doc/gh_DFVisualizationSettings.rst @@ -0,0 +1,14 @@ +.. image:: ../src/gh/components/DF_visualization_settings/icon.png + :align: left + :width: 40px + +``DFVisualizationSettings`` component +===================================== + +.. hint:: This component automatically adds all of its input parameters to the canvas. + + .. image:: _static/gh_components/gh_DFVizSettings_example.png + :align: center + :width: 800px + +.. ghcomponent_to_rst:: ../src/gh/components/DF_visualization_settings \ No newline at end of file diff --git a/doc/gh_DFXMLExporter.rst b/doc/gh_DFXMLExporter.rst new file mode 100644 index 00000000..9349b5bd --- /dev/null +++ b/doc/gh_DFXMLExporter.rst @@ -0,0 +1,8 @@ +.. image:: ../src/gh/components/DF_xml_exporter/icon.png + :align: left + :width: 40px + +``DFXMLExporter`` component +=========================== + +.. ghcomponent_to_rst:: ../src/gh/components/DF_xml_exporter \ No newline at end of file diff --git a/doc/gh_components.rst b/doc/gh_components.rst new file mode 100644 index 00000000..a85ae0b9 --- /dev/null +++ b/doc/gh_components.rst @@ -0,0 +1,102 @@ +.. _gh_dfcomp: + +DiffCheck Gh Components +======================= + +DF has a Grasshopper_ plugin with a set of components that allow the user to interact with DF's API. All components are written in Python 3.9.1. + +.. _Grasshopper: https://www.rhino3d.com/learn/?query=kind:%20grasshopper + +.. 
list-table:: + :widths: 5 40 5 40 + :header-rows: 0 + + * - .. image:: ../src/gh/components/DF_build_assembly/icon.png + - `gh_DFBuildAssembly `_ + - .. image:: ../src/gh/components/DF_deconstruct_assembly/icon.png + - `gh_DFDeconstructAssembly `_ + + * - .. image:: ../src/gh/components/DF_deconstruct_beam/icon.png + - `gh_DFDeconstructBeam `_ + - .. image:: ../src/gh/components/DF_joint_segmentator/icon.png + - `gh_DFJointSegmentator `_ + + * - .. image:: ../src/gh/components/DF_load_cloud_from_file/icon.png + - `gh_DFLoadCloudFromFile `_ + - .. image:: ../src/gh/components/DF_load_mesh_from_file/icon.png + - `gh_DFLoadMeshFromFile `_ + + * - .. image:: ../src/gh/components/DF_cad_segmentator/icon.png + - `gh_DFCADSegmentator `_ + - .. image:: ../src/gh/components/DF_mesh_to_cloud/icon.png + - `gh_DFMeshToCloud `_ + + * - .. image:: ../src/gh/components/DF_normal_segmentator/icon.png + - `gh_DFCloudNormalSegmentator `_ + - .. image:: ../src/gh/components/DF_cloud_normal_estimator/icon.png + - `gh_DFCloudNormalEstimator `_ + + * - .. image:: ../src/gh/components/DF_cloud_size_downsample/icon.png + - `gh_DFCloudSizeDownsample `_ + - .. image:: ../src/gh/components/DF_cloud_uniform_downsample/icon.png + - `gh_DFCloudUniformDownsample `_ + + * - .. image:: ../src/gh/components/DF_cloud_voxel_downsample/icon.png + - `gh_DFCloudVoxelDownsample `_ + - .. image:: ../src/gh/components/DF_cloud_cloud_distance/icon.png + - `gh_DFCloudCloudDistance `_ + + * - .. image:: ../src/gh/components/DF_cloud_mesh_distance/icon.png + - `gh_DFCloudMeshDistance `_ + - .. image:: ../src/gh/components/DF_csv_exporter/icon.png + - `gh_DFCsvExporter `_ + + * - .. image:: ../src/gh/components/DF_fast_global_registration/icon.png + - `gh_DFFastGlobalRegistration `_ + - .. image:: ../src/gh/components/DF_icp_registration/icon.png + - `gh_DFICPRegistration `_ + + * - .. image:: ../src/gh/components/DF_ransac_global_registration/icon.png + - `gh_DFRANSACGlobalRegistration `_ + - .. image:: ../src/gh/components/DF_xml_exporter/icon.png + - `gh_DFXMLExporter `_ + + * - .. image:: ../src/gh/components/DF_visualization/icon.png + - `gh_DFVisualization `_ + - .. image:: ../src/gh/components/DF_visualization_settings/icon.png + - `gh_DFVisualizationSettings `_ + + * - .. image:: ../src/gh/components/DF_tester/icon.png + - `gh_DFTester `_ + - + - + + + +.. toctree:: + :maxdepth: 1 + :hidden: + + gh_DFBuildAssembly + gh_DFCADSegmentator + gh_DFCloudCloudDistance + gh_DFCloudMeshDistance + gh_DFCloudNormalEstimator + gh_DFCloudSizeDownsample + gh_DFCloudUniformDownsample + gh_DFCloudVoxelDownsample + gh_DFCsvExporter + gh_DFDeconstructAssembly + gh_DFDeconstructBeam + gh_DFFastGlobalRegistration + gh_DFICPRegistration + gh_DFJointSegmentator + gh_DFLoadCloudFromFile + gh_DFLoadMeshFromFile + gh_DFMeshToCloud + gh_DFCloudNormalSegmentator + gh_DFRANSACGlobalRegistration + gh_DFTester + gh_DFVisualization + gh_DFVisualizationSettings + gh_DFXMLExporter \ No newline at end of file diff --git a/doc/glossary.md b/doc/glossary.md new file mode 100644 index 00000000..94703bc3 --- /dev/null +++ b/doc/glossary.md @@ -0,0 +1,4 @@ +(glossary)= +# Glossary + +/// list of terms the users might not be familiar with like segmentation, registration, etc. \ No newline at end of file diff --git a/doc/index.rst b/doc/index.rst new file mode 100644 index 00000000..b9889439 --- /dev/null +++ b/doc/index.rst @@ -0,0 +1,178 @@ +.. 
_diffcheck_intro: + +DiffCheck: CAD-Scan comparison +===================================== + +diffCheck(DF) allows to *identify discrepancies across point clouds and 3D models of both individually machined timber pieces featuring various joints as well as fully assembled timber structures*. It can help you quantify the differences between the CAD and scanned fabricated structure, providing a comprehensive report that highlights the discrepancies. + +The software is designed to be user-friendly and can be used either via a Grasshopper plug-in or its Python API. + +The software is developed by the `Laboratory of Timber Construction (IBOIS)`_ and the `Laboratory for Creative Computation (CRCL)`_ at `Polytechnique Fédérale de Lausanne (EPFL)`_. + +.. raw:: html + +
+ diffCheck Front Image +
+ +.. grid:: 3 + :margin: 0 + :padding: 0 + :gutter: 0 + + .. grid-item-card:: Grasshoper plug-in + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + diffCheck is distributed as a Grasshopper plug-in, allowing users to easily compare CAD models with scanned point clouds comfortably within the Rhino environment. + + .. grid-item-card:: GH documentation + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + We provide detailed documentation on how to use the software, including installation instructions, tutorials, and components descriptions. + + .. grid-item-card:: Developer resources + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + diffCheck is also available as a Python API, allowing developers to integrate the software into their own workflows within the Rhino ecosystem. We also welcome contributions to the project! + +.. grid:: 3 + + .. grid-item-card:: + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + .. button-ref:: qstart + :expand: + :ref-type: myst + :color: success + :click-parent: + + :octicon:`zap;2em` Get start! + + .. grid-item-card:: + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + .. button-ref:: gh_dfcomp + :expand: + :ref-type: myst + :color: success + :click-parent: + + :octicon:`book;2em` GHComponents + + .. grid-item-card:: + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + .. button-ref:: dev_df_doc + :expand: + :ref-type: myst + :color: success + :click-parent: + + :octicon:`codespaces;2em` To the Dev docs + +.. grid:: 3 + :margin: 0 + :padding: 0 + :gutter: 0 + + .. grid-item-card:: Tutorials + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + We provide a series of hands-on tutorials to help you get started with diffCheck. The tutorials are based on real timber structures and will guide you through the process of comparing CAD models with scanned point clouds. + + .. grid-item-card:: Python API + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + If you prefer to work with Python, you can use diffCheck's Python API for Rhino. The API allows you to access the software's functionality directly from your Python scripts. + + .. grid-item-card:: Open-source + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + diffCheck is an open-source project, and we welcome contributions from the community. If you have ideas for new features or improvements, feel free to get in touch! + +.. grid:: 3 + + .. grid-item-card:: + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + .. button-ref:: df-tuto + :expand: + :ref-type: myst + :color: success + :click-parent: + + :octicon:`beaker;2em` Learn diffCheck + + .. grid-item-card:: + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + .. button-ref:: diffCheck_API + :expand: + :ref-type: myst + :color: success + :click-parent: + + :octicon:`terminal;2em` DF in Python + + .. grid-item-card:: + :columns: 12 6 6 4 + :class-card: sd-border-0 + :shadow: None + + .. button-ref:: contrib_guide + :expand: + :ref-type: myst + :color: success + :click-parent: + + :octicon:`heart;2em` Contribute + +.. toctree:: + :hidden: + :maxdepth: 2 + :caption: Getting Started + + installation + quickstart + tutorials + + gh_components + + +.. toctree:: + :hidden: + :maxdepth: 3 + :caption: Further resources + + df_architecture + diffCheck_PythonAPI + dev_documentation + glossary + change_log + + +.. _Laboratory of Timber Construction (IBOIS): https://www.epfl.ch/labs/ibois/ +.. 
_Laboratory for Creative Computation (CRCL): https://www.epfl.ch/labs/crcl/ +.. _Polytechnique Fédérale de Lausanne (EPFL) : https://www.epfl.ch/en/ \ No newline at end of file diff --git a/doc/installation.md b/doc/installation.md new file mode 100644 index 00000000..279e6925 --- /dev/null +++ b/doc/installation.md @@ -0,0 +1,21 @@ +(installation)= +# Installing diffCheck + +Using `diffCheck` requires *Rhino* and *Grasshopper*. The plug-in can be installed simply by: + +* Open *Rhino* and tpe on the command bar: + ``` + _PackageManager + ``` +* search for `diffCheck` and click on install. +* launch *Grasshopper* you will find the a new tab in the toolbar. +* drop on the canvas the [test component](gh_DFTester) and connect it to a point cloud and a CAD model. + +

+ +

+ + +```{important} +For now, diffCheck is only supported on Windows ❖. \ No newline at end of file diff --git a/doc/key-concepts.md b/doc/key-concepts.md new file mode 100644 index 00000000..3b10c370 --- /dev/null +++ b/doc/key-concepts.md @@ -0,0 +1,4 @@ +(key-concepts)= +# Key-concepts + +/// add description here about the general structure of diffCheck with schemes and general functioning \ No newline at end of file diff --git a/doc/pre-processing.md b/doc/pre-processing.md new file mode 100644 index 00000000..ba8d693e --- /dev/null +++ b/doc/pre-processing.md @@ -0,0 +1,4 @@ +(pre-processing)= +# Pre-processing + +/// a brief intro on how to use the pre-processing section to clean the scanned point cloud \ No newline at end of file diff --git a/doc/quickstart.md b/doc/quickstart.md new file mode 100644 index 00000000..68954dd1 --- /dev/null +++ b/doc/quickstart.md @@ -0,0 +1,4 @@ +(qstart)= +# Quickstart + +/// a brief rer-intro on how to install and a basic example of a working full and small pipeline of diffCheck \ No newline at end of file diff --git a/doc/segmentation_intro.md b/doc/segmentation_intro.md new file mode 100644 index 00000000..52d9024d --- /dev/null +++ b/doc/segmentation_intro.md @@ -0,0 +1,4 @@ +(segmentation_intro)= +# Segmentation + +/// explain with simple example how to use the segmentation components of diffCheck and why we need them. It can be slit in two parts: single element - full structure \ No newline at end of file diff --git a/doc/sphinx_ghcomponent_parser/__init__.py b/doc/sphinx_ghcomponent_parser/__init__.py new file mode 100644 index 00000000..2078002d --- /dev/null +++ b/doc/sphinx_ghcomponent_parser/__init__.py @@ -0,0 +1,106 @@ +import os +import json + +from docutils import nodes +from docutils.core import publish_parts + +from docutils.parsers.rst import Directive +from sphinx.util.docutils import SphinxDirective +from docutils.statemachine import StringList + +from . 
import metadata_parser + +class GhComponentToRSTDirective(SphinxDirective): + has_content = False + required_arguments = 1 + + def run(self): + ghcomponent_dir = self.arguments[0] + if not os.path.isabs(ghcomponent_dir): + ghcomponent_dir = os.path.join(self.env.srcdir, ghcomponent_dir) + if not os.path.exists(ghcomponent_dir): + return [self.state_machine.reporter.warning( + f'GHComponent directory not found: {ghcomponent_dir}', line=self.lineno)] + metadata_json_file = os.path.join(ghcomponent_dir, 'metadata.json') + pycode_file = os.path.join(ghcomponent_dir, 'code.py') + icon_file = os.path.join(ghcomponent_dir, 'icon.png') + + metadata = metadata_parser.MetadataParser(metadata_json_file) + + ########################################################################## + # RST content + ########################################################################## + # to be used in toctree directive needs an unique id + section = nodes.section(ids=["211c205b-cacd-486a-b321-d0d98bf1a6c7"]) + #------------------------------------------------------------------------- + # general description + section += nodes.Text(metadata.get_description()) + #------------------------------------------------------------------------- + # input parameters + subtitle_input = nodes.subtitle() + subtitle_input_text = nodes.strong(text="Inputs:") + subtitle_input += subtitle_input_text + section += subtitle_input + + table = nodes.table() + tgroup = nodes.tgroup(cols=2) + table += tgroup + tgroup += nodes.colspec(colwidth=1) + tgroup += nodes.colspec(colwidth=1) + + tbody = nodes.tbody() + tgroup += tbody + if metadata.get_input_parameters(): + for param in metadata.get_input_parameters(): + row = nodes.row() + entry = nodes.entry() + entry += nodes.paragraph('', '', nodes.literal(text=f"{param['name']}"), nodes.Text(f" ({param['typeHintID']} ,{param['scriptParamAccess']})")) + row += entry + row += nodes.entry('', nodes.paragraph(text=param['description'])) + tbody += row + section += table + #------------------------------------------------------------------------- + # output parameters + subtitle_output = nodes.subtitle() + subtitle_output_text = nodes.strong(text="Outputs:") + subtitle_output += subtitle_output_text + section += subtitle_output + + table = nodes.table() + tgroup = nodes.tgroup(cols=2) + table += tgroup + tgroup += nodes.colspec(colwidth=1) + tgroup += nodes.colspec(colwidth=1) + + tbody = nodes.tbody() + tgroup += tbody + if metadata.get_output_parameters(): + for param in metadata.get_output_parameters(): + row = nodes.row() + row += nodes.entry('', nodes.literal(text=f"{param['name']}")) + row += nodes.entry('', nodes.paragraph(text=param['description'])) + tbody += row + section += table + #------------------------------------------------------------------------- + # code block + subtitle_code = nodes.subtitle() + subtitle_code_text = nodes.strong(text="Code:") + subtitle_code += subtitle_code_text + section += subtitle_code + + with open(pycode_file, 'r') as file: + code_block_text = file.read() + code_block = nodes.literal_block() + code_block += nodes.Text(code_block_text) + section += code_block + #------------------------------------------------------------------------- + return [section] + + +def setup(app): + app.add_directive("ghcomponent_to_rst", GhComponentToRSTDirective) + return { + 'version': '0.1', + 'parallel_read_safe': True, + 'parallel_write_safe': True, + } diff --git a/doc/sphinx_ghcomponent_parser/metadata_parser.py b/doc/sphinx_ghcomponent_parser/metadata_parser.py new 
file mode 100644 index 00000000..e32c4222 --- /dev/null +++ b/doc/sphinx_ghcomponent_parser/metadata_parser.py @@ -0,0 +1,107 @@ +#! python3 + +""" + This component parse the metadata.json of the GHComponent and convert it to RST format. +""" + +import json + +class MetadataParser: + def __init__(self, file_path): + self.file_path = file_path + self.data = self.load_json() + + def load_json(self): + with open(self.file_path, 'r') as file: + return json.load(file) + + def get_name(self): + return self.data.get("name", "") + + def get_nickname(self): + return self.data.get("nickname", "") + + def get_category(self): + return self.data.get("category", "") + + def get_subcategory(self): + return self.data.get("subcategory", "") + + def get_description(self): + return self.data.get("description", "") + + def get_exposure(self): + return self.data.get("exposure", 0) + + def get_instance_guid(self): + return self.data.get("instanceGuid", "") + + def get_ghpython_settings(self): + return self.data.get("ghpython", {}) + + def get_input_parameters(self): + ghpython = self.get_ghpython_settings() + return ghpython.get("inputParameters", []) + + def get_output_parameters(self): + ghpython = self.get_ghpython_settings() + return ghpython.get("outputParameters", []) + + def return_rst_content(self): + content = [] + # Uncomment these lines if you want to include additional metadata + # content.append(f"{'`' * 2}{self.get_name()}{'`' * 2}") + # content.append(f"{'#' * 2} {self.get_nickname()}") + # content.append(f"{'#' * 2} {self.get_category()}") + # content.append(f"{'#' * 2} {self.get_subcategory()}") + # content.append(f"{'#' * 2} {self.get_description()}") + # content.append(f"{'#' * 2} {self.get_exposure()}") + # content.append(f"{'#' * 2} {self.get_instance_guid()}") + # content.append(f"{'#' * 2} GHPython Settings:") + # for key, value in self.get_ghpython_settings().items(): + # content.append(f"{'#' * 3} {key}: {value}") + + + + params = self.get_input_parameters() + # if params: # Ensure params is not empty + # content.append("\n") + # content.append(".. list-table:: inputs\n :header-rows: 1\n") + # content.append(" * - parameter") + # content.append(" - description") + # content.append(" * - test") + # content.append(" - test1") + # # for param in params: + # # content.append(f" * - {param['name']}") + # # content.append(f" - {param['description']}") + + content.append("prooooooooooooooooova1") + content.append("\n") + content.append("\n") + content.append(".. 
list-table:: inputs\n :header-rows: 1\n") + content.append(" * - Package") + content.append(" - Version") + content.append(" * - vc") + content.append(" - 14.3=hcf57466_18") + content.append(" * - vc14_runtime") + content.append(" - 14.16.27012=hf0eaf9b_1") + content.append("\n") + content.append("prooooooooooooooooova2") + + # convert merge into one string + + + return content + + + # content.append(f"{'#' * 3} Name: {param.get('name', '')}") + # content.append(f"{'#' * 3} Nickname: {param.get('nickname', '')}") + # content.append(f"{'#' * 3} Description: {param.get('description', '')}") + # content.append(f"{'#' * 3} Optional: {param.get('optional', False)}") + # content.append(f"{'#' * 3} Allow Tree Access: {param.get('allowTreeAccess', False)}") + # content.append(f"{'#' * 3} Show Type Hints: {param.get('showTypeHints', False)}") + # content.append(f"{'#' * 3} Script Param Access: {param.get('scriptParamAccess', '')}") + # content.append(f"{'#' * 3} Wire Display: {param.get('wireDisplay', '')}") + # content.append(f"{'#' * 3} Source Count: {param.get('sourceCount', 0)}") + # content.append(f"{'#' * 3} Type Hint ID: {param.get('typeHintID', '')}") + # return content diff --git a/doc/style.md b/doc/style.md new file mode 100644 index 00000000..89d4c889 --- /dev/null +++ b/doc/style.md @@ -0,0 +1,235 @@ +(style_guide)= +# Style guide + +Here you can find documentation and guidelines for contributing to the source code of DF. + +--- + +(git_module)= +## Git + +(git_commit_system)= +### GitHub commit convention +All commits need to be labeled with a tag among these: +``` +git commit -m "ADD:" <--- for adding new elements +git commit -m "FIX:" <--- for fixing (errors, typos) +git commit -m "FLASH:" <--- quick checkpoint before refactoring +git commit -m "MILESTONE:" <--- for a capping moment in development +git commit -m "CAP:" <--- for less important milestones +git commit -m "UPDATE:" <--- for modifications to the same file +git commit -m "MISC:" <--- for any other reasons to be described +git commit -m "WIP:" <--- for unfinished work +git commit -m "REFACTOR:" <--- for refactored code +git commit -m "MERGE:" <--- for merging operations +``` +You can merge a few tags, e.g.: +``` +git commit -m "WIP-CAP: <--- for a cap moment in unfinished work +``` + +### Delete submodule +To delete a submodule on Windows, you need to: +1. Delete the relevant section from the `.gitmodules` file. The section would look something like this: +```console +[submodule "submodule_name"] + path = submodule_path + url = submodule_url +``` +2. Stage the `.gitmodules` changes: +```console +git add .gitmodules +``` +3. (optional) Delete the relevant section from `.git/config`. The section would look something like this: +```console +[submodule "submodule_name"] + url = submodule_url +``` +4. Run `git rm --cached path_to_submodule` (no trailing slash). +5. Run `Remove-Item -Recurse -Force .git/modules/path_to_submodule`. +6. Commit the changes: +```console +git commit -m "Remove a submodule name" +``` + +--- + +(py_conv)= +# Python + +## Py sanity check +To ensure code quality we use the following linter and type-checker tools: +- [mypy](https://mypy.readthedocs.io/en/stable/index.html) for type checking. +- [Ruff](https://docs.astral.sh/ruff/) for code quality and style. + +(pyghcomp_style)= +## Python Grasshopper Components + +Here's the list of conventions for DF's Grasshopper components in Python (a minimal skeleton following them is sketched right after this list): +* `i_` for input parameters: e.g. `i_plane` for a plane input. +* `o_` for output parameters: e.g. `o_plane` for a plane output. +* `DF` for the component name: e.g. `DF_tester` for a tester component, and the name of the class should be `class DFTester(component)`.
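+
+As an illustration only (the `DFExample` name and the pass-through logic are made up; the base-class import matches the existing DF components), such a skeleton could look like this:
+
+```python
+#! python3
+
+import Rhino.Geometry as rg
+
+from ghpythonlib.componentbase import executingcomponent as component
+
+
+class DFExample(component):
+    def RunScript(self, i_plane: rg.Plane):
+        # inputs are prefixed with `i_`, outputs with `o_`
+        o_plane = i_plane
+        return o_plane
+```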
+ +--- +(cpp_conv)= +# C++ + +### Naming & syntax convention +Here's the naming convention for this project: +- ` `: lowerCamelCase. +- `type PublicVariable`: public member of a class +- `type m_PrivateVariable`: Hungarian notation with UpperCamelCase for private class members. +- `static type s_StaticVariable`: Hungarian notation with UpperCamelCase for static members of a class. +- `APP_SPEC`: Constants with SNAKE_UPPER_CASE. +- All other naming uses UpperCamelCase. + +Here's an example: +```c++ +// do not use using namespace std; we specify the namespace every time +std::foo() + +// next line graph style +void Foo() +{ + /* content */ +} + +// structure name uses UpperCamelCase +struct AnExampleStruct +{ + // structure attribute uses UpperCamelCase + const char* Name; +}; + +// class name uses UpperCamelCase +class AnExampleClass +{ +public: + AnExampleClass(const int& init); + virtual ~AnExampleClass(); + + // member functions use UpperCamelCase + void PublicMemberFunction() + { + // local variable uses lowerCamelCase + int localVariable = 0; + } + +// A field indicator to separate the functions and attributes +public: + int PublicVariable; + +// Private member function block +private: + // member functions use UpperCamelCase + void PrivateMemberFunction(); + +// Also a field indicator to separate the functions and attributes +private: + // private variables uses Hungarian notation with UpperCamelCase + int m_PrivateVariable; // m_VariableName for normal variable + static int s_Instance; // s_VariableName for static variable +}; + +// Start headers with +#pragma once + +// Start declarations with precompiled headers +#include "aiacpch.h" +``` + +### Only smart (or unique) pointers +It's 2024, we can pass on raw pointers. We use smart pointers. +```c++ +std::unique_ptr<AnExampleClass> example = std::make_unique<AnExampleClass>(0); +``` +Or, if you do not want to transfer the ownership of the object, use a shared pointer. +```c++ +std::shared_ptr<AnExampleClass> example = std::make_shared<AnExampleClass>(0); +``` + +### Debugging with GDB +We use GDB for debugging. To install GDB on Windows, do the following: +1. Download the MSYS2 installer from the [MSYS2 website](https://www.msys2.org/). +2. Run the installer and follow the instructions in the [MSYS2 installation guide](https://www.msys2.org/wiki/MSYS2-installation/). +3. Open the MSYS2 terminal and update the core package database: +```bash +pacman -Syu +``` +4. Install the GDB debugger: +```bash +pacman -S mingw-w64-x86_64-gdb +``` +5. Add GDB to the system `PATH` environment variable. +6. Close the terminal sessions you were using and open a new one. Now you can use GDB. +```bash +gdb "path-to-executable" +``` +> use `run` to start the program and `quit` to exit the debugger. +> use `break` to set a breakpoint and `continue` to continue the execution of the program. +> use `bt` to see the backtrace of the program when a segfault occurs. + +### Doxygen +For documentation we use the [*JavaDoc* convention](https://doxygen.nl/manual/docblocks.html). +Follow [this guide for documenting the code](https://developer.lsst.io/cpp/api-docs.html). +```c++ +/** + * @brief fill a vector of TSPlanes from a yaml file containing their corners data + * @param filename path to the map.yaml file + * @param planes vector of TSPlane objects + */ +``` + +### Logging +To log use the following MACROS. 
All the code is contained in `log.hh` and `log.cc`. +```c++ +DIFFCHECK_INFO("test_core_info"); +DIFFCHECK_WARN("test_core_warn"); +DIFFCHECK_ERROR("test_core_error"); +DIFFCHECK_FATAL("test_core_critical"); +``` +The output is like so: +```bash +2024-03-30 12:53:29.971 ( 0.000s) [ ADF6D348] diffCheckApp.cc:24 INFO| test_core_info +2024-03-30 12:53:29.972 ( 0.000s) [ ADF6D348] diffCheckApp.cc:25 WARN| test_core_warn +2024-03-30 12:53:29.972 ( 0.000s) [ ADF6D348] diffCheckApp.cc:26 ERR| test_core_error +2024-03-30 12:53:29.972 ( 0.000s) [ ADF6D348] diffCheckApp.cc:27 FATL| test_core_critical +``` +Logging can be silenced by setting the corresponding option to ON in the main `CMakeLists.txt`. +```cmake +option(SILENT_LOGGING "Do not log messages in the terminal of on." ON) +``` + +### I/O and basic datatypes +Here's how you can import a point cloud from a file: +```c++ +#include "diffCheck/geometry/DFPointCloud.hh" +#include "diffCheck/geometry/DFMesh.hh" + +// clouds +std::shared_ptr<diffCheck::geometry::DFPointCloud> dfPointCloudPtr = std::make_shared<diffCheck::geometry::DFPointCloud>(); +std::string pathCloud = R"(C:\Users\yourfilecloudpath.ply)"; +dfPointCloudPtr->LoadFromPLY(pathCloud); + +// mesh +std::shared_ptr<diffCheck::geometry::DFMesh> dfMeshPtr = std::make_shared<diffCheck::geometry::DFMesh>(); +std::string pathMesh = R"(C:\Users\yourfilemeshpath.ply)"; +dfMeshPtr->LoadFromPLY(pathMesh); +``` + +### Visualizer + +Clouds and meshes can be visualized like this: +```c++ +#include "diffCheck/visualizer/DFVisualizer.hh" + +// clouds +std::shared_ptr<diffCheck::visualizer::DFVisualizer> dfVisualizerPtr = std::make_shared<diffCheck::visualizer::DFVisualizer>(); +dfVisualizerPtr->AddPointCloud(dfPointCloudPtr); +dfVisualizerPtr->Run(); + +// mesh +std::shared_ptr<diffCheck::visualizer::DFVisualizer> dfVisualizerPtr = std::make_shared<diffCheck::visualizer::DFVisualizer>(); +dfVisualizerPtr->AddMesh(dfMeshPtr); +dfVisualizerPtr->Run(); +``` diff --git a/doc/test_pybind_dll_smoke.py b/doc/test_pybind_dll_smoke.py new file mode 100644 index 00000000..f6f95a19 --- /dev/null +++ b/doc/test_pybind_dll_smoke.py @@ -0,0 +1,28 @@ +""" This file contains a simple test for the Python bindings to the C++ code (dlls reading, pyd importing etc). """ + +import os +import sys + +# Import the C++ bindings +extra_dll_dir = os.path.abspath(os.path.dirname(__file__)) +print(f"extra_dll_dir: {extra_dll_dir}") +sys.path.append(extra_dll_dir) # Add this directory to the Python path +# os.add_dll_directory(extra_dll_dir) # For finding DLL dependencies on Windows +try: + import diffCheck.diffcheck_bindings as dfb # noqa: F401 +except ImportError as e: + print(f"Failed to import diffcheck_bindings: {e}") + print("Current sys.path directories:") + for path in sys.path: + print(path) + print("Current files in the directory:") + for file in os.listdir(extra_dll_dir): + print(file) + sys.exit(1) + +# def test_dfb_test_simple(): +# assert dfb.dfb_test.test() == True, "The test function should return True" + +# if __name__ == "__main__": +# pytest.main() +print(">>>>>>>>>>>>>>>>> test_pybind_dll_smoke.py succeeded <<<<<<<<<<<<<<<<<<<<") diff --git a/doc/testing.rst b/doc/testing.rst new file mode 100644 index 00000000..e8f86d4c --- /dev/null +++ b/doc/testing.rst @@ -0,0 +1,104 @@ + +.. _test_guide: + +DFTesting +========= + +Ideally, if we add code to the project, we should also add tests (at least unit tests). +In DF we use `CTest` as a test framework, managed by CMake in the file ``cmake/tests.cmake``, to run: + +* `c++ <#cpp_test>`_ tests with `GoogleTest`, and +* `Python <#py_test>`_ tests with `PyTest`. + +Tests are in the ``tests`` folder, and here's its structure: + +.. code-block:: console + + F:\DIFFCHECK\TESTS + │ allCppTests.cc + │ + ├───integration_tests <-- mainly python interfaces + │ ├───ghcomponents_tests <-- relative to gh components + │ │ .gitkeep + │ │ + │ ├───package_tests <-- relative to the pypi package + │ │ .gitkeep + │ │ + │ └───pybinds_tests <-- strictly pybinding + │ │ diffCheck.dll + │ │ diffcheck_bindings.cp39-win_amd64.pyd + │ │ Open3D.dll + │ │ test_pybind_pyver.py + │ │ test_pybind_units.py + │ + ├───test_data <-- here is where we put some .ply data + │ roof_quarter.ply + │ + └───unit_tests <-- c++ backend, one for each header + DFLog.cc + DFPointCloudTest.cc + +To run the tests, you can use the following commands: + +.. code-block:: console + + cmake -S . -B build -A x64 -DBUILD_PYTHON_MODULE=ON -DBUILD_TESTS=ON -DRUN_TESTS=ON + cmake --build build --config Release + + +.. _py_test: + +Write DF Python tests +--------------------- + +To write a test, you need to create a new file in the ``tests/integration_tests`` folder. Write a new ``.py`` test file if you are not contributing to an already existing test, and add it in the ``cmake/tests.cmake`` in the ``add_test()`` function: + +.. code-block:: cmake + + add_test(NAME PYBIND_UNIT_TEST + COMMAND ${PYTHON_EXECUTABLE} -m pytest ${CMAKE_CURRENT_SOURCE_DIR}/tests/integration_tests/pybinds_tests/test_pybind_units.py + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + ) + +Use a fixture to your needs, and write your test. Here is an example of a fixture that always loads a point cloud from the ``test_data`` folder: + +.. literalinclude:: ../tests/integration_tests/pybinds_tests/test_pybind_units.py + :language: python + :pyobject: create_DFPointCloudSampleRoof + :caption: `test_pybind_units.py <../tests/integration_tests/pybinds_tests/test_pybind_units.py>`_ + +Then you can use it in your test: + +.. literalinclude:: ../tests/integration_tests/pybinds_tests/test_pybind_units.py + :language: python + :pyobject: test_DFPointCloud_apply_color +
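+A minimal, hypothetical unit test for the bindings could then look like this (it only relies on the default ``DFPointCloud`` constructor and ``get_num_points`` exposed by :mod:`diffCheck.diffcheck_bindings`; the empty-cloud assumption is illustrative):
+
+.. code-block:: python
+
+    import diffCheck.diffcheck_bindings as dfb
+
+    def test_DFPointCloud_starts_empty():
+        # a freshly constructed cloud is expected to contain no points
+        cloud = dfb.dfb_geometry.DFPointCloud()
+        assert cloud.get_num_points() == 0
+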
+.. _cpp_test: + +Write DF C++ tests +------------------ + +To write a test, you need to create a new file in the ``tests/unit_tests`` folder. Next add your file to the executable ``${CPP_UNIT_TESTS}`` in the ``cmake/tests.cmake``: + +.. code-block:: cmake + + add_test(NAME PYBIND_UNIT_TEST + COMMAND ${PYTHON_EXECUTABLE} -m pytest ${CMAKE_CURRENT_SOURCE_DIR}/tests/integration_tests/pybinds_tests/test_pybind_units.py + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + ) + +Use a fixture to your needs, and write your test. Here is an example of a fixture that always loads a point cloud from the ``test_data`` folder: + +.. literalinclude:: ../tests/unit_tests/DFPointCloudTest.cc + :language: cpp + :lines: 1-27 + :caption: `DFPointCloudTest.cc <../tests/unit_tests/DFPointCloudTest.cc>`_ + +and you can use it in your test: + +.. code-block:: cpp + + TEST_F(DFPointCloudTestFixture, HasColors) { + EXPECT_TRUE(dfPointCloud.HasColors()); + } \ No newline at end of file diff --git a/doc/tutorials.rst b/doc/tutorials.rst new file mode 100644 index 00000000..dec17d25 --- /dev/null +++ b/doc/tutorials.rst @@ -0,0 +1,14 @@ +.. _df-tuto: + +diffCheck tutorials +=================== + +.. 
toctree:: + :maxdepth: 1 + + quickstart + key-concepts + pre-processing + assembly_intro + segmentation_intro + compute_error_intro \ No newline at end of file diff --git a/environment.yml b/environment.yml index 84575bfd..a24851e4 100644 Binary files a/environment.yml and b/environment.yml differ diff --git a/favicon.ico b/favicon.ico new file mode 100644 index 00000000..d9726f92 Binary files /dev/null and b/favicon.ico differ diff --git a/invokes/flagerize.py b/invokes/flagerize.py index 25ed3f82..74220e69 100644 --- a/invokes/flagerize.py +++ b/invokes/flagerize.py @@ -5,7 +5,6 @@ import argparse import re -import typing def main( @@ -122,11 +121,11 @@ def main( if is_version_ok: print("\t[x] Correct version format") else: - print(f"\t[ ] Correct version format") + print("\t[ ] Correct version format") if is_source_populated: print(f"\t[x] Source folder is populated {args.source} with {nbr_pycode_files} components") else: - print(f"\t[ ] Source folder is populated") + print("\t[ ] Source folder is populated") if parse_errors.__len__() != 0: for error in parse_errors: @@ -148,4 +147,4 @@ def main( print("[x] Done flagerizing components.") else: print("[ ] Failed flagerizing components.") - sys.exit(1) \ No newline at end of file + sys.exit(1) diff --git a/invokes/ghcomponentize/ghcomponentizer.py b/invokes/ghcomponentize/ghcomponentizer.py index 2cf9445e..dc962b77 100644 --- a/invokes/ghcomponentize/ghcomponentizer.py +++ b/invokes/ghcomponentize/ghcomponentizer.py @@ -5,7 +5,9 @@ import re import sys import tempfile -import urllib.request, urllib.parse, urllib.error +import urllib.request +import urllib.parse +import urllib.error import zipfile from io import BytesIO @@ -134,8 +136,6 @@ def validate_source_bundle(source): ) ) - ghpython = data.get("ghpython") - if r'"""' not in python_code: python_code = r'"""{}"""{}{}'.format( data.get("description", "Generated by Componentizer"), @@ -382,7 +382,7 @@ def create_ghuser_component(source, target, version=None, prefix=None): sys.exit(-1) clr.AddReference(os.path.splitext(gh_io)[0]) - + print("[x] GH_IO assembly: {}".format(gh_io)) print("Processing component bundles:") @@ -391,4 +391,4 @@ def create_ghuser_component(source, target, version=None, prefix=None): target = os.path.join(targetdir, d + ".ghuser") print(" [ ] {}\r".format(d), end="") create_ghuser_component(source, target, args.version, args.prefix) - print(" [x] {} => {}".format(d, target)) \ No newline at end of file + print(" [x] {} => {}".format(d, target)) diff --git a/invokes/pypireize.py b/invokes/pypireize.py index 90da23b9..83817a37 100644 --- a/invokes/pypireize.py +++ b/invokes/pypireize.py @@ -4,7 +4,6 @@ import sys import argparse -import typing def main( setup_path: str, @@ -13,8 +12,8 @@ def main( setup_dir = os.path.dirname(setup_path) os.chdir(setup_dir) try: - os.system(f"python setup.py sdist") - os.system(f"python setup.py bdist_wheel") + os.system("python setup.py sdist") + os.system("python setup.py bdist_wheel") except Exception as e: print(f"Failed to build the package: {e}") return False @@ -58,4 +57,4 @@ def main( print("[x] Pypireize task completed.") else: print("[ ] Pypireize task failed.") - sys.exit(1) \ No newline at end of file + sys.exit(1) diff --git a/invokes/versionize.py b/invokes/versionize.py index ec5f6c38..5b44be25 100644 --- a/invokes/versionize.py +++ b/invokes/versionize.py @@ -1,11 +1,10 @@ -#! python3 +#! 
python3 import os import sys import argparse import re -import typing def main( @@ -28,7 +27,7 @@ def main( if manifest_crt_version is not None: if version <= manifest_crt_version: print(f"Version {version} is equal or smaller than the current version {manifest_crt_version}. Please provide a version number bigger than the current one.") - return + return False else: print("Could not find the current version in the manifest file.") sys.exit(1) @@ -78,8 +77,6 @@ def main( with open(path_init, "w") as f: f.write(init) - # modify the CMake file - cmake_crt_version = None # search the first "project" line with open(path_cmake, "r") as f: cmake = f.read() @@ -157,7 +154,7 @@ def main( or len(_version) < 5: is_version_ok = False parse_errors.append("Version must be in the format: Major.Minor.Patch") - + is_manifest_ok = True is_setup_ok = True is_init_ok = True @@ -202,4 +199,4 @@ def main( print("[x] Versionizer completed successfully.") else: print("[ ] Versionizer failed.") - sys.exit(1) \ No newline at end of file + sys.exit(1) diff --git a/invokes/yakerize/yakerize.py b/invokes/yakerize/yakerize.py index dd9d48e1..7f583139 100644 --- a/invokes/yakerize/yakerize.py +++ b/invokes/yakerize/yakerize.py @@ -4,7 +4,6 @@ import sys import argparse import shutil -import typing def main( gh_components_dir: str, @@ -174,4 +173,4 @@ def main( print("[x] Yakerize task completed.") else: print("[ ] Yakerize task failed.") - sys.exit(1) \ No newline at end of file + sys.exit(1) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..5b251ea0 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,35 @@ +[tool.mypy] +warn_return_any = true +warn_unused_configs = true +exclude = [ + "deps", + "src/gh/components", + "src/gh/diffCheck/setup.py", + "temp", + "doc" +] + +[[tool.mypy.overrides]] +module = [ + "Rhino.*", + "rhinoscriptsyntax.*", + "scriptcontext.*", + "Grasshopper.*", + "System.*", + "GH_IO.*", + "clr.*", + "diffcheck_bindings" +] +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "pefile" +ignore_missing_imports = true + + +[tool.ruff] +exclude = [ + "doc/", + "deps/", + "temp/" +] \ No newline at end of file diff --git a/src/diffCheckBindings.cc b/src/diffCheckBindings.cc index 55e2db7e..dcfd892e 100644 --- a/src/diffCheckBindings.cc +++ b/src/diffCheckBindings.cc @@ -23,11 +23,13 @@ PYBIND11_MODULE(diffcheck_bindings, m) { // dfb_geometry namespace //################################################################################################# - py::module_ submodule_geometry = m.def_submodule("dfb_geometry", "A submodule for the geometry classes."); + py::module_ submodule_geometry = m.def_submodule("dfb_geometry", "A submodule for wrap geometries in df."); - py::class_>(submodule_geometry, "DFPointCloud") + py::class_>(submodule_geometry, "DFPointCloud", + "A class for the point cloud representation.") .def(py::init<>()) - .def(py::init, std::vector, std::vector>()) + .def(py::init, std::vector, std::vector>(), + py::arg("points"), py::arg("colors"), py::arg("normals")) .def("compute_distance", &diffCheck::geometry::DFPointCloud::ComputeDistance, py::arg("target_cloud")) @@ -51,12 +53,13 @@ PYBIND11_MODULE(diffcheck_bindings, m) { py::arg("r"), py::arg("g"), py::arg("b")) .def("load_from_PLY", &diffCheck::geometry::DFPointCloud::LoadFromPLY) - .def("add_points", &diffCheck::geometry::DFPointCloud::AddPoints) + .def("add_points", &diffCheck::geometry::DFPointCloud::AddPoints) .def("get_tight_bounding_box", 
&diffCheck::geometry::DFPointCloud::GetTightBoundingBox) .def("get_axis_aligned_bounding_box", &diffCheck::geometry::DFPointCloud::GetAxixAlignedBoundingBox) - .def("get_num_points", &diffCheck::geometry::DFPointCloud::GetNumPoints) + .def("get_num_points", &diffCheck::geometry::DFPointCloud::GetNumPoints, + "Get the number of points in the point cloud.") .def("get_num_colors", &diffCheck::geometry::DFPointCloud::GetNumColors) .def("get_num_normals", &diffCheck::geometry::DFPointCloud::GetNumNormals) .def("get_center_point", &diffCheck::geometry::DFPointCloud::GetCenterPoint) @@ -75,7 +78,8 @@ PYBIND11_MODULE(diffcheck_bindings, m) { [](const diffCheck::geometry::DFPointCloud &self) { return self.Normals; }, [](diffCheck::geometry::DFPointCloud &self, const std::vector& value) { self.Normals = value; }); - py::class_>(submodule_geometry, "DFMesh") + py::class_>(submodule_geometry, "DFMesh", + "A class for the triangle mesh representation.") .def(py::init<>()) .def(py::init, std::vector, std::vector, std::vector, std::vector>()) @@ -129,7 +133,8 @@ PYBIND11_MODULE(diffcheck_bindings, m) { py::module_ submodule_registrations = m.def_submodule("dfb_registrations", "A submodule for the registration methods."); - py::class_(submodule_registrations, "DFGlobalRegistrations") + py::class_(submodule_registrations, "DFGlobalRegistrations", + "A static class for the global registration methods.") .def_static("O3DFastGlobalRegistrationFeatureMatching", &diffCheck::registrations::DFGlobalRegistrations::O3DFastGlobalRegistrationFeatureMatching, py::arg("source"), py::arg("target"), @@ -155,7 +160,8 @@ PYBIND11_MODULE(diffcheck_bindings, m) { py::arg("ransac_max_iteration") = 5000, py::arg("ransac_confidence_threshold") = 0.999); - py::class_(submodule_registrations, "DFRefinedRegistration") + py::class_(submodule_registrations, "DFRefinedRegistration", + "A static class for the refined registration methods.") .def_static("O3DICP", &diffCheck::registrations::DFRefinedRegistration::O3DICP, py::arg("source"), py::arg("target"), @@ -179,7 +185,8 @@ PYBIND11_MODULE(diffcheck_bindings, m) { py::module_ submodule_segmentation = m.def_submodule("dfb_segmentation", "A submodule for the `semantic` segmentation methods."); - py::class_(submodule_segmentation, "DFSegmentation") + py::class_(submodule_segmentation, "DFSegmentation", + "A static class for the segmentation methods.") .def_static("segment_by_normal", &diffCheck::segmentation::DFSegmentation::NormalBasedSegmentation, py::arg("point_cloud"), py::arg("normal_threshold_degree") = 20.0, diff --git a/src/gh/components/DF_CAD_segmentator/code.py b/src/gh/components/DF_CAD_segmentator/code.py index edd304ca..49ab985b 100644 --- a/src/gh/components/DF_CAD_segmentator/code.py +++ b/src/gh/components/DF_CAD_segmentator/code.py @@ -1,14 +1,10 @@ #! python3 -import System import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML import diffCheck import diffCheck.df_geometries @@ -16,7 +12,6 @@ from diffCheck import df_cvt_bindings -import numpy as np class DFCADSegmentator(component): @@ -26,17 +21,7 @@ def RunScript(self, i_angle_threshold : float, i_association_threshold : float ) -> rg.PointCloud: - """ - @param i_meshes : the beams (to be converted) - @param i_angle_threshold : from 0 to 1, it's the sin value. The closer to 0 the less permissive and viceversa to 1. 
- @param i_association_threshold: from 0 to infinite. By default 0.5. The closer to 0 the less permissive your point - inclusion will be, the higher the value the opposite. - - @return o_clusters : the clusters of the beams - """ - # the final rhino cloud clusters associated to the beams o_clusters = [] - # the df cloud clusters df_clusters = [] # we make a deepcopy of the input clouds df_clouds = [df_cvt_bindings.cvt_rhcloud_2_dfcloud(cloud.Duplicate()) for cloud in i_clouds] @@ -66,18 +51,7 @@ def RunScript(self, angle_threshold=i_angle_threshold, association_threshold=i_association_threshold ) - - o_clusters = [df_cvt_bindings.cvt_dfcloud_2_rhcloud(cluster) for cluster in df_clusters] return o_clusters, rh_beams_meshes - -# if __name__ == "__main__": -# com = DFCADSegmentator() -# o_clusters, rh_beams_meshes = com.RunScript( -# i_clouds, -# i_assembly, -# i_angle_threshold, -# i_association_threshold -# ) \ No newline at end of file diff --git a/src/gh/components/DF_build_assembly/code.py b/src/gh/components/DF_build_assembly/code.py index 7581d872..7a9af9fc 100644 --- a/src/gh/components/DF_build_assembly/code.py +++ b/src/gh/components/DF_build_assembly/code.py @@ -4,12 +4,9 @@ import typing import Rhino -import Rhino.Geometry as rg -import scriptcontext as sc from ghpythonlib.componentbase import executingcomponent as component -import diffCheck from diffCheck.df_geometries import DFBeam, DFAssembly @@ -17,15 +14,6 @@ class DFBuildAssembly(component): def RunScript(self, i_assembly_name, i_breps : System.Collections.Generic.IList[Rhino.Geometry.Brep]): - """ - This component parse a series of breps representing a timber structure or a - timber elements into a DFAssembly object. - - :param i_assembly_name: the name of the assembly - :param i_breps: list of breps - - :return o_assembly: the DFAssembly object - """ beams: typing.List[DFBeam] = [] for brep in i_breps: beam = DFBeam.from_brep_face(brep) @@ -34,11 +22,3 @@ def RunScript(self, o_assembly = DFAssembly(beams, i_assembly_name) return o_assembly - - -# if __name__ == "__main__": -# comp = DFBuildAssembly() -# o_assembly = comp.RunScript( -# i_assembly_name, -# i_breps -# ) diff --git a/src/gh/components/DF_cloud_cloud_distance/code.py b/src/gh/components/DF_cloud_cloud_distance/code.py index de9724bf..b5f85217 100644 --- a/src/gh/components/DF_cloud_cloud_distance/code.py +++ b/src/gh/components/DF_cloud_cloud_distance/code.py @@ -1,43 +1,24 @@ #! python3 -import System import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML -import diffCheck from diffCheck import df_cvt_bindings from diffCheck import df_error_estimation -import diffCheck.df_util -class CloudCloudDistance(component): +class DFCloudCloudDistance(component): def RunScript(self, i_cloud_source: typing.List[rg.PointCloud], i_cloud_target: typing.List[rg.PointCloud], i_swap: bool): - """ - The cloud-to-cloud component computes the distance between each point in the source point cloud and its nearest neighbour in thr target point cloud. 
- - :param i_cloud_source: a list of source point cloud - :param i_cloud_target: a list of target point cloud to calculate distances to - - :return o_distances : list of calculated distances for each point - :return o_rmse: the root mean squared error between corresponding points of source and target - :return o_max_deviation: the max deviation between source and target - :return o_min_deviation: the min deviation between source and target - :return o_std_deviation: the standard deviation between source and target - :returns o_resluts: the results of the comparison all in one object - """ - if i_cloud_source is None or i_cloud_target is None: - ghenv.Component.AddRuntimeMessage(RML.Warning, "Please provide both objects of type point clouds to compare") + ghenv.Component.AddRuntimeMessage(RML.Warning, "Please provide both objects of type point clouds to compare") # noqa: F821 return None # swap @@ -54,12 +35,3 @@ def RunScript(self, o_results = df_error_estimation.df_cloud_2_df_cloud_comparison(df_cloud_source_list, df_cloud_target_list) return o_results.distances, o_results.distances_rmse, o_results.distances_max_deviation, o_results.distances_min_deviation, o_results.distances_sd_deviation, o_results - - -# if __name__ == "__main__": -# com = CloudCloudDistance() -# o_distances, o_rmse, o_max_deviation, o_min_deviation, o_std_deviation, o_results = com.RunScript( -# i_cloud_source, -# i_cloud_target, -# i_swap -# ) \ No newline at end of file diff --git a/src/gh/components/DF_cloud_mesh_distance/code.py b/src/gh/components/DF_cloud_mesh_distance/code.py index 354f13a0..0e213029 100644 --- a/src/gh/components/DF_cloud_mesh_distance/code.py +++ b/src/gh/components/DF_cloud_mesh_distance/code.py @@ -1,14 +1,10 @@ #! python3 -import System import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML import diffCheck from diffCheck import df_cvt_bindings @@ -16,7 +12,7 @@ from diffCheck.df_geometries import DFBeam -class CloudMeshDistance(component): +class DFCloudMeshDistance(component): def RunScript(self, i_cloud_source: typing.List[rg.PointCloud], i_beams: typing.List[DFBeam], @@ -24,20 +20,6 @@ def RunScript(self, i_swap: bool, i_analysis_resolution): - """ - The cloud-to-mesh component computes the distance between a point cloud and a mesh - - :param i_cloud_source: a list of point clouds - :param i_beams: a list of DF beams - :param i_signed_flag: calculate the sign of the distances - :param i_swap: swap source and target - - :return o_distances : list of calculated distances for each point - :return o_rmse: the root mean square error between corresponding points of source and target - :return o_max_deviation: the max deviation between source and target - :return o_min_deviation: the min deviation between source and target - :returns o_results: the results of the comparison all in one object - """ if i_analysis_resolution is None: scalef = diffCheck.df_util.get_doc_2_meters_unitf() i_analysis_resolution = 0.1 / scalef @@ -50,14 +32,3 @@ def RunScript(self, o_result = df_error_estimation.df_cloud_2_rh_mesh_comparison(df_cloud_source_list, rh_mesh_target_list, i_signed_flag, i_swap) return o_result.distances, o_result.distances_rmse, o_result.distances_max_deviation, o_result.distances_min_deviation, o_result.distances_sd_deviation, o_result - - -# if __name__ == "__main__": -# com = CloudMeshDistance() -# o_distances, o_rmse, 
o_max_deviation, o_min_deviation, o_std_deviation, o_results = com.RunScript( -# i_cloud_source, -# i_beams, -# i_signed_flag, -# i_swap, -# i_analysis_resolution -# ) \ No newline at end of file diff --git a/src/gh/components/DF_cloud_normal_estimator/code.py b/src/gh/components/DF_cloud_normal_estimator/code.py index 92fa9691..8c888685 100644 --- a/src/gh/components/DF_cloud_normal_estimator/code.py +++ b/src/gh/components/DF_cloud_normal_estimator/code.py @@ -1,13 +1,9 @@ #! python3 -import System -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import diffCheck -import diffCheck.df_geometries from diffCheck import df_cvt_bindings class DFCloudNormalEstimator(component): @@ -17,14 +13,6 @@ def RunScript(self, i_radius : float = None, i_switch_mode : bool = True ): - """ - Evaluaate the n ormals of a point cloud. - - :param i_cloud: Point cloud to evaluate normals. - :i_knn: Number of nearest neighbors to consider. - :i_radius: Radius of the search. - :i_switch_mode: Switch between Open3d (true) or Cilantro (false) library. - """ o_cloud = rg.PointCloud() df_cloud = df_cvt_bindings.cvt_rhcloud_2_dfcloud(i_cloud) @@ -41,12 +29,3 @@ def RunScript(self, o_cloud = df_cvt_bindings.cvt_dfcloud_2_rhcloud(df_cloud) return o_cloud - -# if __name__ == "__main__": -# comp = DFCloudNormalEstimator() -# o_cloud = comp.RunScript( -# i_cloud, -# i_knn, -# i_radius, -# i_switch_mode -# ) \ No newline at end of file diff --git a/src/gh/components/DF_cloud_size_downsample/code.py b/src/gh/components/DF_cloud_size_downsample/code.py index c663f3a6..be261d4c 100644 --- a/src/gh/components/DF_cloud_size_downsample/code.py +++ b/src/gh/components/DF_cloud_size_downsample/code.py @@ -1,17 +1,10 @@ #! python3 -import System -import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML -import diffCheck -import diffCheck.df_geometries from diffCheck import df_cvt_bindings class DFCloudSizeDownsample(component): @@ -19,23 +12,8 @@ def RunScript(self, i_cloud: rg.PointCloud, i_size: int, ) -> rg.PointCloud: - """ - Downsample a point cloud by giving the target size of the downsampled cloud. - - :param i_cloud: input point cloud - :param i_size: the size of the wished downsampled cloud - - :return o_cloud: downsampled point cloud - """ df_cloud = df_cvt_bindings.cvt_rhcloud_2_dfcloud(i_cloud) df_cloud.downsample_by_size(i_size) o_cloud = df_cvt_bindings.cvt_dfcloud_2_rhcloud(df_cloud) return [o_cloud] - -# if __name__ == "__main__": -# com = DFCloudSizeDownsample() -# o_cloud = com.RunScript( -# i_cloud, -# i_size, -# ) \ No newline at end of file diff --git a/src/gh/components/DF_cloud_uniform_downsample/code.py b/src/gh/components/DF_cloud_uniform_downsample/code.py index 39b8df15..0b07e5b1 100644 --- a/src/gh/components/DF_cloud_uniform_downsample/code.py +++ b/src/gh/components/DF_cloud_uniform_downsample/code.py @@ -1,17 +1,10 @@ #! 
python3 -import System -import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML -import diffCheck -import diffCheck.df_geometries from diffCheck import df_cvt_bindings class DFCloudUniformDownsample(component): @@ -19,23 +12,8 @@ def RunScript(self, i_cloud: rg.PointCloud, i_every_k_points: int, ) -> rg.PointCloud: - """ - Downsample a point cloud using in a uniform way by selecting every k points to delete. - - :param i_cloud: input point cloud - :param i_every_k_points: number of every k points to delete - - :return o_cloud: downsampled point cloud - """ df_cloud = df_cvt_bindings.cvt_rhcloud_2_dfcloud(i_cloud) df_cloud.uniform_downsample(i_every_k_points) o_cloud = df_cvt_bindings.cvt_dfcloud_2_rhcloud(df_cloud) return [o_cloud] - -# if __name__ == "__main__": -# com = DFCloudUniformDownsample() -# o_cloud = com.RunScript( -# i_cloud, -# i_every_k_points, -# ) \ No newline at end of file diff --git a/src/gh/components/DF_cloud_voxel_downsample/code.py b/src/gh/components/DF_cloud_voxel_downsample/code.py index 72e8ba7a..bd4b04c6 100644 --- a/src/gh/components/DF_cloud_voxel_downsample/code.py +++ b/src/gh/components/DF_cloud_voxel_downsample/code.py @@ -1,17 +1,10 @@ #! python3 -import System -import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML -import diffCheck -import diffCheck.df_geometries from diffCheck import df_cvt_bindings class DFCloudVoxelDownsample(component): @@ -19,23 +12,8 @@ def RunScript(self, i_cloud: rg.PointCloud, i_voxel_size: float, ) -> rg.PointCloud: - """ - Downsample a point cloud using a voxel grid filter. - - :param i_cloud: input point cloud - :param i_voxel_size: voxel size, the value represents the side of the voxel - - :return o_cloud: downsampled point cloud - """ df_cloud = df_cvt_bindings.cvt_rhcloud_2_dfcloud(i_cloud) df_cloud.voxel_downsample(i_voxel_size) o_cloud = df_cvt_bindings.cvt_dfcloud_2_rhcloud(df_cloud) return [o_cloud] - -# if __name__ == "__main__": -# com = DFCloudVoxelDownsample() -# o_cloud = com.RunScript( -# i_cloud, -# i_voxel_size, -# ) \ No newline at end of file diff --git a/src/gh/components/DF_cvs_exporter/code.py b/src/gh/components/DF_csv_exporter/code.py similarity index 59% rename from src/gh/components/DF_cvs_exporter/code.py rename to src/gh/components/DF_csv_exporter/code.py index d987c9f1..c64b587c 100644 --- a/src/gh/components/DF_cvs_exporter/code.py +++ b/src/gh/components/DF_csv_exporter/code.py @@ -1,37 +1,20 @@ #! python3 -import System -import typing -import Rhino -import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML from diffCheck.df_error_estimation import DFVizResults import csv import os -class CsvExporter(component): +class DFCsvExporter(component): def RunScript(self, i_dump: bool, i_export_dir: str, i_file_name: str, i_export_seperate_files: bool, i_result: DFVizResults): - """ - The csv-exporter component exports a list of values to a .csv file - - :param i_dump: A flag indicating whether to perform the export. - :param i_export_dir: The directory where the CSV file will be saved. 
- :param i_file_name: The name of the file - :param i_export_seperate_files: whether to export a different file for each part - :param i_values: A list of values to be exported. - - :return o_success: A string notifying the user for the successful export - """ if i_dump: # Ensure the export directory exists os.makedirs(i_export_dir, exist_ok=True) @@ -55,14 +38,3 @@ def RunScript(self, o_success = "Successfully exported the values" return o_success - - -# if __name__ == "__main__": -# com = CsvExporter() -# o_cvs = com.RunScript( -# i_dump, -# i_export_dir, -# i_file_name, -# i_export_seperate_files, -# i_results -# ) \ No newline at end of file diff --git a/src/gh/components/DF_cvs_exporter/icon.png b/src/gh/components/DF_csv_exporter/icon.png similarity index 100% rename from src/gh/components/DF_cvs_exporter/icon.png rename to src/gh/components/DF_csv_exporter/icon.png diff --git a/src/gh/components/DF_cvs_exporter/metadata.json b/src/gh/components/DF_csv_exporter/metadata.json similarity index 100% rename from src/gh/components/DF_cvs_exporter/metadata.json rename to src/gh/components/DF_csv_exporter/metadata.json diff --git a/src/gh/components/DF_deconstruct_assembly/code.py b/src/gh/components/DF_deconstruct_assembly/code.py index f4c0686a..241e6d28 100644 --- a/src/gh/components/DF_deconstruct_assembly/code.py +++ b/src/gh/components/DF_deconstruct_assembly/code.py @@ -1,35 +1,13 @@ #! python3 -import System -import typing -import Rhino -import Rhino.Geometry as rg -import scriptcontext as sc from ghpythonlib.componentbase import executingcomponent as component -import diffCheck -from diffCheck.df_geometries import DFBeam, DFAssembly class DFDeconstructAssembly(component): def RunScript(self, i_assembly): - """ - Deconstruct the DFAssembly into a set of df_beams objects. - - :param i_assembly: the DFAssembly object - - :return o_beams - """ o_beams = i_assembly.beams - return o_beams - - -# if __name__ == "__main__": -# comp = DFDeconstructAssembly() -# o_beams = comp.RunScript( -# i_assembly -# ) diff --git a/src/gh/components/DF_deconstruct_beam/code.py b/src/gh/components/DF_deconstruct_beam/code.py index 89f46070..d714dde6 100644 --- a/src/gh/components/DF_deconstruct_beam/code.py +++ b/src/gh/components/DF_deconstruct_beam/code.py @@ -1,31 +1,17 @@ #! python3 -import System import typing -import Rhino -import Rhino.Geometry as rg -import scriptcontext as sc from ghpythonlib.componentbase import executingcomponent as component -import diffCheck -from diffCheck.df_geometries import DFBeam, DFAssembly +from diffCheck.df_geometries import DFBeam class DFDeconstructBeam(component): def RunScript(self, i_beams : typing.List[DFBeam]): - """ - Deconstruct the DFBeam object into Rhino objects. 
- - :param i_beams: the DFBeam objects - - :return o_side_faces: the side joints of the beam - :return o_joint_faces: the face joints of the beam - :return o_joint_ids: the ids for each face joint - """ - o_side_faces, o_joint_faces, o_joint_ids, o_breps = [], [], [], [] + o_side_faces, o_joint_faces, o_joint_ids = [], [], [] for i_b in i_beams: o_side_faces = [f.to_brep_face() for f in i_b.side_faces] @@ -33,9 +19,3 @@ def RunScript(self, o_joint_ids = [f.joint_id for f in i_b.joint_faces] return o_side_faces, o_joint_faces, o_joint_ids - -# if __name__ == "__main__": -# comp = DFDeconstructBeam() -# o_side_faces, o_joint_faces, o_joint_ids = comp.RunScript( -# i_beams -# ) diff --git a/src/gh/components/DF_fast_global_registration/code.py b/src/gh/components/DF_fast_global_registration/code.py index 9de1420f..1012b455 100644 --- a/src/gh/components/DF_fast_global_registration/code.py +++ b/src/gh/components/DF_fast_global_registration/code.py @@ -1,13 +1,9 @@ #! python3 -import System -import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML import diffCheck @@ -26,29 +22,21 @@ def RunScript(self, i_iteration_number: int, i_max_tuple_count: int ) -> rg.Transform: - """ - The global registration component aligns two point clouds in a rough way. - - :param i_cloud_source: source point cloud - :param i_cloud_target: target point cloud to align to - :param i_radius_kd_search: radius for the kd search - :param i_neighbours_kd_search: number of neighbours to consider - :param i_max_corrspondence_dist: maximum correspondence distance - :param i_iteration_number: number of iterations - :param i_max_tuple_count: maximum tuple count - - :return o_x_form : transformation matrix - """ if i_cloud_source is None or i_cloud_target is None: - ghenv.Component.AddRuntimeMessage(RML.Warning, "Please provide both objects of type point clouds to align") + ghenv.Component.AddRuntimeMessage(RML.Warning, "Please provide both objects of type point clouds to align") # noqa: F821 return None # set default values - if i_radius_kd_search is None: i_radius_kd_search = 0.8 - if i_neighbours_kd_search is None: i_neighbours_kd_search = 50 - if i_max_corrspondence_dist is None: i_max_corrspondence_dist = 0.05 - if i_iteration_number is None: i_iteration_number = 128 - if i_max_tuple_count is None: i_max_tuple_count = 1000 + if i_radius_kd_search is None: + i_radius_kd_search = 0.8 + if i_neighbours_kd_search is None: + i_neighbours_kd_search = 50 + if i_max_corrspondence_dist is None: + i_max_corrspondence_dist = 0.05 + if i_iteration_number is None: + i_iteration_number = 128 + if i_max_tuple_count is None: + i_max_tuple_count = 1000 # get the working unit of the Rhino document, if other than meters, set a multiplier factor scalef = diffCheck.df_util.get_doc_2_meters_unitf() @@ -81,22 +69,9 @@ def RunScript(self, for j in range(4): rh_form[i, j] = df_xform_matrix[i, j] if rh_form == rg.Transform.Identity: - ghenv.Component.AddRuntimeMessage(RML.Warning, "The transformation matrix is identity, no transformation is applied") + ghenv.Component.AddRuntimeMessage(RML.Warning, "The transformation matrix is identity, no transformation is applied") # noqa: F821 return None o_x_form = rh_form return o_x_form - - -# if __name__ == "__main__": -# com = DFFastGlobalRegistration() -# o_x_form = com.RunScript( -# i_cloud_source, -# i_cloud_target, -# i_radius_kd_search, -# 
i_neighbours_kd_search, -# i_max_corrspondence_dist, -# i_iteration_number, -# i_max_tuple_count -# ) \ No newline at end of file diff --git a/src/gh/components/DF_icp_registration/code.py b/src/gh/components/DF_icp_registration/code.py index b8660049..76ebb887 100644 --- a/src/gh/components/DF_icp_registration/code.py +++ b/src/gh/components/DF_icp_registration/code.py @@ -1,13 +1,9 @@ #! python3 -import System -import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML import diffCheck @@ -29,27 +25,17 @@ def RunScript(self, is_t_estimate_pt2pt: bool, # valid only for 03dicp i_use_point_to_plane: bool # valid only for 03dicp ) -> rg.Transform: - """ - The global registration component aligns two point clouds in a rough way. - - :param i_cloud_source: source point cloud - :param i_cloud_target: target point cloud to align to - :param i_use_generalized_icp: if true, it uses the generalized ICP algorithm - :param i_max_corrspondence_dist: maximum correspondence distance - :param i_max_iteration: maximum number of iterations - :param is_t_estimate_pt2pt: (valid only for 03dicp) if true, it deforms the point cloud - :param i_use_point_to_plane: (valid only for 03dicp) if true, it uses point to plane registration - - :return o_x_form : transformation matrix - """ if i_cloud_source is None or i_cloud_target is None: - ghenv.Component.AddRuntimeMessage(RML.Warning, "Please provide both objects of type point clouds to align") + ghenv.Component.AddRuntimeMessage(RML.Warning, "Please provide both objects of type point clouds to align") # noqa: F821 return None # set default values - if i_use_generalized_icp is None: i_use_generalized_icp = True - if i_max_corrspondence_dist is None: i_max_corrspondence_dist = 5 - if i_max_iteration is None: i_max_iteration = 50 + if i_use_generalized_icp is None: + i_use_generalized_icp = True + if i_max_corrspondence_dist is None: + i_max_corrspondence_dist = 5 + if i_max_iteration is None: + i_max_iteration = 50 # get the working unit of the Rhino document, if other than meters, set a multiplier factor scalef = diffCheck.df_util.get_doc_2_meters_unitf() @@ -97,25 +83,9 @@ def RunScript(self, for j in range(4): rh_form[i, j] = df_xform_matrix[i, j] if rh_form == rg.Transform.Identity: - ghenv.Component.AddRuntimeMessage(RML.Warning, "The transformation matrix is identity, no transformation is applied") + ghenv.Component.AddRuntimeMessage(RML.Warning, "The transformation matrix is identity, no transformation is applied") # noqa: F821 return None - + o_x_form = rh_form return o_x_form - - -# if __name__ == "__main__": -# com = DFICPRegistration() -# o_x_form = com.RunScript( -# i_cloud_source, -# i_cloud_target, - -# i_use_generalized_icp, - -# i_max_corrspondence_dist, -# i_max_iteration, - -# is_t_estimate_pt2pt, -# i_use_point_to_plane -# ) \ No newline at end of file diff --git a/src/gh/components/DF_joint_segmentator/code.py b/src/gh/components/DF_joint_segmentator/code.py index 62ec0928..e2e77325 100644 --- a/src/gh/components/DF_joint_segmentator/code.py +++ b/src/gh/components/DF_joint_segmentator/code.py @@ -2,10 +2,8 @@ import Rhino -import diffCheck from diffCheck import diffcheck_bindings from diffCheck import df_cvt_bindings as df_cvt -import diffCheck.df_util from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML from ghpythonlib.componentbase import executingcomponent as component @@ -28,7 
+26,6 @@ def RunScript(self, if len(i_clusters) == 0: raise ValueError("No clusters given.") - if not isinstance(i_clusters[0], Rhino.Geometry.PointCloud): raise ValueError("The input clusters must be PointClouds.") @@ -81,10 +78,4 @@ def RunScript(self, else: ghenv.Component.AddRuntimeMessage(RML.Warning, "Some joints could not be segmented and were ignored.") - return o_joint_segments, o_transforms, o_reference_point_clouds - -# if __name__ == "__main__": -# comp = DFJointSegmentator() -# o_joint_segments, o_transforms, o_reference_point_clouds = comp.RunScript(i_clusters, i_assembly, i_angle_threshold, i_distance_threshold) -# for i in range(len(o_joint_segments)): -# o_joint_segments[i].Transform(o_transforms[i]) + return o_joint_segments, o_transforms, o_reference_point_clouds \ No newline at end of file diff --git a/src/gh/components/DF_load_cloud_from_file/code.py b/src/gh/components/DF_load_cloud_from_file/code.py index 73b45c0f..ba240697 100644 --- a/src/gh/components/DF_load_cloud_from_file/code.py +++ b/src/gh/components/DF_load_cloud_from_file/code.py @@ -1,13 +1,8 @@ #! python3 -import System - -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML from diffCheck import diffcheck_bindings from diffCheck import df_cvt_bindings @@ -16,14 +11,6 @@ class DFLoadCloudFromFile(component): def RunScript(self, i_path: str, i_scalef: float) -> rg.PointCloud: - """ - This component loads a point cloud from a .ply file. - - :param i_path: path to the .ply file - :param i_scalef: scale factor - - :return o_rh_cloud: Rhino PointCloud - """ # import and convert to Rhino Cloud df_cloud = diffcheck_bindings.dfb_geometry.DFPointCloud() df_cloud.load_from_PLY(i_path) @@ -33,9 +20,5 @@ def RunScript(self, centroid = rh_cloud.GetBoundingBox(True).Center x_form_scale = rg.Transform.Scale(centroid, i_scalef) rh_cloud.Transform(x_form_scale) - + return [rh_cloud] - -# if __name__ == "__main__": -# com = DFLoadCloudFromFile() -# o_rh_cloud = com.RunScript(i_path, i_scalef) \ No newline at end of file diff --git a/src/gh/components/DF_load_mesh_from_file/code.py b/src/gh/components/DF_load_mesh_from_file/code.py index 0eba78f6..a1122efe 100644 --- a/src/gh/components/DF_load_mesh_from_file/code.py +++ b/src/gh/components/DF_load_mesh_from_file/code.py @@ -1,13 +1,9 @@ #! python3 -import System -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML from diffCheck import diffcheck_bindings from diffCheck import df_cvt_bindings @@ -16,14 +12,6 @@ class DFLoadMeshFromFile(component): def RunScript(self, i_path: str, i_scalef: float) -> rg.Mesh: - """ - This compoonent loads a Rhino mesh from a .ply file. 
- - :param i_path: path to the .ply file - :param i_scalef: scale factor - - :return o_mesh: Rhino Mesh - """ # import and convert to Rhino Mesh df_mesh = diffcheck_bindings.dfb_geometry.DFMesh() df_mesh.load_from_PLY(i_path) @@ -35,7 +23,3 @@ def RunScript(self, rh_mesh.Transform(x_form_scale) return [rh_mesh] - -# if __name__ == "__main__": -# com = DFLoadMeshFromFile() -# o_rh_mesh = com.RunScript(i_path, i_scalef) \ No newline at end of file diff --git a/src/gh/components/DF_mesh_to_cloud/code.py b/src/gh/components/DF_mesh_to_cloud/code.py index 77f4314c..ca8112da 100644 --- a/src/gh/components/DF_mesh_to_cloud/code.py +++ b/src/gh/components/DF_mesh_to_cloud/code.py @@ -1,17 +1,11 @@ #! python3 -import System -import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML import diffCheck -from diffCheck import diffcheck_bindings import diffCheck.df_cvt_bindings @@ -20,14 +14,6 @@ class DFMeshToCloud(component): def RunScript(self, i_mesh: rg.Mesh, i_points: int) -> rg.PointCloud: - """ - Convert a Rhino mesh to a cloud. - - :param i_mesh: mesh to convert - :param i_points: number of points to sample - - :return o_cloud: rhino cloud - """ df_mesh = diffCheck.df_cvt_bindings.cvt_rhmesh_2_dfmesh(i_mesh) df_cloud = df_mesh.sample_points_uniformly(i_points) @@ -35,4 +21,4 @@ def RunScript(self, rgpoints = [rg.Point3d(pt[0], pt[1], pt[2]) for pt in df_cloud.points] rh_cloud = rg.PointCloud(rgpoints) - return [rh_cloud] \ No newline at end of file + return [rh_cloud] diff --git a/src/gh/components/DF_normal_segmentator/code.py b/src/gh/components/DF_normal_segmentator/code.py index 65573326..9c096abf 100644 --- a/src/gh/components/DF_normal_segmentator/code.py +++ b/src/gh/components/DF_normal_segmentator/code.py @@ -1,17 +1,10 @@ #! python3 -import System -import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML -import diffCheck -import diffCheck.df_geometries from diffCheck.diffcheck_bindings import dfb_segmentation from diffCheck import df_cvt_bindings @@ -26,16 +19,6 @@ def RunScript(self, i_knn_neighborhood_size=None, i_radius_neighborhood_size=None ) -> rg.PointCloud: - """ - Segment a point cloud into clusters based on normals. - - :param i_cloud: Point cloud to segment. - :param i_normal_threshold_degree: Threshold in degrees to consider a normal as a cluster. - :param i_min_cluster_size: Minimum size of a cluster. - :param i_use_knn_neighborhood: Use KNN neighborhood. - :param i_knn_neighborhood_size: Size of the KNN neighborhood. - :param i_radius_neighborhood_size: Size of the radius neighborhood. 
- """ o_clusters = [] df_cloud = df_cvt_bindings.cvt_rhcloud_2_dfcloud(i_cloud) @@ -61,14 +44,3 @@ def RunScript(self, ) return [df_cvt_bindings.cvt_dfcloud_2_rhcloud(cluster) for cluster in o_clusters] - -# if __name__ == "__main__": -# com = DFCloudNormalSegmentator() -# o_clusters = com.RunScript( -# i_cloud, -# i_normal_threshold_degree, -# i_min_cluster_size, -# i_use_knn_neighborhood, -# i_knn_neighborhood_size, -# i_radius_neighborhood_size -# ) \ No newline at end of file diff --git a/src/gh/components/DF_ransac_global_registration/code.py b/src/gh/components/DF_ransac_global_registration/code.py index 8042f62c..79a34f4f 100644 --- a/src/gh/components/DF_ransac_global_registration/code.py +++ b/src/gh/components/DF_ransac_global_registration/code.py @@ -1,13 +1,9 @@ #! python3 -import System -import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML import diffCheck @@ -29,37 +25,29 @@ def RunScript(self, i_max_iterations: int, i_confidence_threshold: float ) -> rg.Transform: - """ - The global registration component aligns two point clouds in a rough way. - - :param i_cloud_source: source point cloud - :param i_cloud_target: target point cloud to align to - :param i_radius_kd_search: radius for the kd search - :param i_neighbours_kd_search: number of neighbours to consider - :param i_max_corrspondence_dist: maximum correspondence distance - :param is_t_estimate_pt2pt: if true, it deforms the point cloud - :param i_ransac_n: number of ransac iterations - :param i_checker_dist: correspondence checker distance - :param i_similarity_threshold: similarity threshold for the correspondence - :param i_max_iterations: maximum number of iterations - :param i_confidence_threshold: confidence threshold for RANSAC - - :return o_x_form : transformation matrix - """ if i_cloud_source is None or i_cloud_target is None: - ghenv.Component.AddRuntimeMessage(RML.Warning, "Please provide both objects of type point clouds to align") + ghenv.Component.AddRuntimeMessage(RML.Warning, "Please provide both objects of type point clouds to align") # noqa: F821 return None # set default values - if i_radius_kd_search is None: i_radius_kd_search = 1 - if i_neighbours_kd_search is None: i_neighbours_kd_search = 50 - if i_max_corrspondence_dist is None: i_max_corrspondence_dist = 0.5 - if is_t_estimate_pt2pt is None: is_t_estimate_pt2pt = False - if i_ransac_n is None: i_ransac_n = 3 - if i_checker_dist is None: i_checker_dist = 0.5 - if i_similarity_threshold is None: i_similarity_threshold = 1.5 - if i_max_iterations is None: i_max_iterations = 5000 - if i_confidence_threshold is None: i_confidence_threshold = 0.999 + if i_radius_kd_search is None: + i_radius_kd_search = 1 + if i_neighbours_kd_search is None: + i_neighbours_kd_search = 50 + if i_max_corrspondence_dist is None: + i_max_corrspondence_dist = 0.5 + if is_t_estimate_pt2pt is None: + is_t_estimate_pt2pt = False + if i_ransac_n is None: + i_ransac_n = 3 + if i_checker_dist is None: + i_checker_dist = 0.5 + if i_similarity_threshold is None: + i_similarity_threshold = 1.5 + if i_max_iterations is None: + i_max_iterations = 5000 + if i_confidence_threshold is None: + i_confidence_threshold = 0.999 # get the working unit of the Rhino document, if other than meters, set a multiplier factor scalef = diffCheck.df_util.get_doc_2_meters_unitf() @@ -99,26 +87,9 @@ def RunScript(self, for j in range(4): 
rh_form[i, j] = df_xform_matrix[i, j] if rh_form == rg.Transform.Identity: - ghenv.Component.AddRuntimeMessage(RML.Warning, "The transformation matrix is identity, no transformation is applied") + ghenv.Component.AddRuntimeMessage(RML.Warning, "The transformation matrix is identity, no transformation is applied") # noqa: F821 return None o_x_form = rh_form return o_x_form - - -# if __name__ == "__main__": -# com = DFRANSACGlobalRegistration() -# o_x_form = com.RunScript( -# i_cloud_source, -# i_cloud_target, -# i_radius_kd_search, -# i_neighbours_kd_search, -# i_max_corrspondence_dist, -# is_t_estimate_pt2pt, -# i_ransac_n, -# i_checker_dist, -# i_similarity_threshold, -# i_max_iterations, -# i_confidence_threshold -# ) \ No newline at end of file diff --git a/src/gh/components/DF_tester/code.py b/src/gh/components/DF_tester/code.py index 8728e021..75088add 100644 --- a/src/gh/components/DF_tester/code.py +++ b/src/gh/components/DF_tester/code.py @@ -1,12 +1,8 @@ #! python3 -import System -import Rhino -import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML import diffCheck @@ -15,21 +11,14 @@ class DFTester(component): def RunScript(self): - """ - The component test and import bind module for diffCheck. - """ - ghenv.Component.Message = f"diffCheck v: {diffCheck.__version__}" + ghenv.Component.Message = f"diffCheck v: {diffCheck.__version__}" # noqa: F821 is_binding_imported = diffcheck_bindings.dfb_test.test() if not is_binding_imported: - ghenv.Component.AddRuntimeMessage(RML.Warning, "Bindings not imported.") + ghenv.Component.AddRuntimeMessage(RML.Warning, "Bindings not imported.") # noqa: F821 else: - ghenv.Component.AddRuntimeMessage(RML.Remark, "Bindings imported.") + ghenv.Component.AddRuntimeMessage(RML.Remark, "Bindings imported.") # noqa: F821 print(f"diffCheck test: {diffCheck.df_cvt_bindings.test_bindings()}") return is_binding_imported - -# if __name__ == "__main__": -# tester = DFTester() -# tester.RunScript() diff --git a/src/gh/components/DF_visualization/code.py b/src/gh/components/DF_visualization/code.py index 9d69c93e..19874dc8 100644 --- a/src/gh/components/DF_visualization/code.py +++ b/src/gh/components/DF_visualization/code.py @@ -1,14 +1,9 @@ #! 
python3 -import System -import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh -from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML from diffCheck import df_cvt_bindings from diffCheck import df_visualization @@ -16,23 +11,10 @@ from diffCheck.df_error_estimation import DFVizResults from diffCheck import diffcheck_bindings -class Visualization(component): +class DFVisualization(component): def RunScript(self, i_result: DFVizResults, i_viz_settings: DFVizSettings): - - """ - Adds color to the mesh or point cloud - and generates a corresponding legend and histogram - - :param i_result: a list of point clouds - :param i_viz_settings: a list of DF beams - - :return o_colored_geo: the colored mesh or point cloud - :return o_legend : the correesponding legend of the coloring - :return o_histogram : a histogram that represents the frequency of the values - """ - values, min_value, max_value = i_result.filter_values_based_on_valuetype(i_viz_settings) # check if i_result.source is a list of pointclouds or a mesh @@ -68,11 +50,3 @@ def RunScript(self, scaling_factor=i_viz_settings.histogram_scale_factor) return o_colored_geo, o_legend, o_histogram - - -# if __name__ == "__main__": -# com = Visualization() -# o_colored_geo, o_legend, o_histogram = com.RunScript( -# i_result, -# i_viz_settings -# ) \ No newline at end of file diff --git a/src/gh/components/DF_visualization_settings/code.py b/src/gh/components/DF_visualization_settings/code.py index 2995c451..b59a8c44 100644 --- a/src/gh/components/DF_visualization_settings/code.py +++ b/src/gh/components/DF_visualization_settings/code.py @@ -1,18 +1,14 @@ #! python3 -import System -import typing -import Rhino import Rhino.Geometry as rg from ghpythonlib.componentbase import executingcomponent as component -import Grasshopper as gh from Grasshopper.Kernel import GH_RuntimeMessageLevel as RML from diffCheck import df_visualization -class VisualizationSettings(component): +class DFVisualizationSettings(component): def RunScript(self, i_value_type: str, i_upper_threshold: float, @@ -22,36 +18,23 @@ def RunScript(self, i_legend_width: float, i_legend_plane: rg.Plane, i_histogram_scale_factor: float): - - """ - Compiles all the visualization settings to feed to the visualization component - - :param i_value_type: selected type indicates Which values to display. Possible values: "dist", "RMSE", "MAX", "MIN", "STD" - :param i_upper_threshold: Thresholds the values with a maximum value - :param i_lower_threshold: Thresholds the values with a minimum value - :param i_palette: Select a color palette to map the values to. 
Possible values: "Jet", "Rainbow", "RdPu", "Viridis" - :param i_legend_height: the total height of the legend - :param i_legend_width: the total width of the legend - :param i_legend_plane: the construction plane of the legend - :param i_histogram_scale_factor: Scales the height of the histogram with a factor - - :returns o_viz_settings: the results of the comparison all in one object - """ - - if i_palette not in ["Jet", "Rainbow", "RdPu", "Viridis"]: - ghenv.Component.AddRuntimeMessage(RML.Warning, "Possible values for i_palette are: Jet, Rainbow, RdPu, Viridis") + ghenv.Component.AddRuntimeMessage(RML.Warning, "Possible values for i_palette are: Jet, Rainbow, RdPu, Viridis") # noqa: F821 return None - + if i_value_type not in ["Dist", "RMSE", "MAX", "MIN", "STD"]: - ghenv.Component.AddRuntimeMessage(RML.Warning, "Possible values for i_value_type are: dist, RMSE, MAX, MIN, STD") + ghenv.Component.AddRuntimeMessage(RML.Warning, "Possible values for i_value_type are: dist, RMSE, MAX, MIN, STD") # noqa: F821 return None - + # set default values - if i_legend_height is None: i_legend_height = 10 - if i_legend_width is None: i_legend_width = 0.5 - if i_legend_plane is None: i_legend_plane = rg.Plane.WorldXY - if i_histogram_scale_factor is None: i_histogram_scale_factor = 0.01 + if i_legend_height is None: + i_legend_height = 10 + if i_legend_width is None: + i_legend_width = 0.5 + if i_legend_plane is None: + i_legend_plane = rg.Plane.WorldXY + if i_histogram_scale_factor is None: + i_histogram_scale_factor = 0.01 # pack settings o_viz_settings = df_visualization.DFVizSettings(i_value_type, @@ -64,16 +47,3 @@ def RunScript(self, i_histogram_scale_factor) return o_viz_settings - -# if __name__ == "__main__": -# com = VisualizationSettings() -# o_viz_settings = com.RunScript( -# i_value_type, -# i_upper_threshold, -# i_lower_threshold, -# i_palette, -# i_legend_height, -# i_legend_width, -# i_legend_plane, -# i_histogram_scale_factor -# ) \ No newline at end of file diff --git a/src/gh/components/DF_xml_exporter/code.py b/src/gh/components/DF_xml_exporter/code.py index 01ec7e76..e0953c2c 100644 --- a/src/gh/components/DF_xml_exporter/code.py +++ b/src/gh/components/DF_xml_exporter/code.py @@ -1,11 +1,6 @@ #! python3 -import System -import typing -import Rhino -import Rhino.Geometry as rg -import scriptcontext as sc from ghpythonlib.componentbase import executingcomponent as component @@ -15,14 +10,6 @@ def RunScript(self, i_dump: bool, i_export_dir, i_assembly): - """ - Export the DFAssembly to XML. 
- - :param i_dump: whether to dump the xml - :param i_assembly: the assembly to export - - :return o_xml: the xml string - """ # dump the xml o_xml = None xml: str = i_assembly.to_xml() @@ -31,12 +18,3 @@ def RunScript(self, o_xml = xml return o_xml - - -# if __name__ == "__main__": -# com = DFXMLExporter() -# o_xml = com.RunScript( -# i_dump, -# i_export_dir, -# i_assembly, -# ) \ No newline at end of file diff --git a/src/gh/diffCheck/diffCheck/__init__.py b/src/gh/diffCheck/diffCheck/__init__.py index cf7c1647..8202d966 100644 --- a/src/gh/diffCheck/diffCheck/__init__.py +++ b/src/gh/diffCheck/diffCheck/__init__.py @@ -1,5 +1,4 @@ import os -import sys __version__ = "0.0.24" diff --git a/src/gh/diffCheck/diffCheck/df_cvt_bindings.py b/src/gh/diffCheck/diffCheck/df_cvt_bindings.py index 8348b3cc..f4087f03 100644 --- a/src/gh/diffCheck/diffCheck/df_cvt_bindings.py +++ b/src/gh/diffCheck/diffCheck/df_cvt_bindings.py @@ -7,17 +7,18 @@ import Rhino import Rhino.Geometry as rg import scriptcontext as sc -import numpy as np -from diffCheck import diffcheck_bindings +from typing import Any -def test_bindings() -> bool: +from diffCheck import diffcheck_bindings # type: ignore + +def test_bindings() -> Any: """ Test the bindings import. :return is_imported: True if the bindings are imported, False otherwise """ - return False + return diffcheck_bindings.dfb_test.test() def cvt_rhcloud_2_dfcloud(rh_cloud) -> diffcheck_bindings.dfb_geometry.DFPointCloud: """ @@ -27,7 +28,7 @@ def cvt_rhcloud_2_dfcloud(rh_cloud) -> diffcheck_bindings.dfb_geometry.DFPointCl :return df_cloud: diffCheck cloud """ - + if not isinstance(rh_cloud, rg.PointCloud): raise ValueError("rh_cloud for convertion should be a PointCloud") @@ -230,7 +231,7 @@ def cvt_dfOBB_2_rhbrep(df_OBB) -> rg.Box: def cvt_ndarray_2_rh_transform(matrix) -> rg.Transform: """ - Convert a numpy matrix to a Rhino transformation. + Convert a numpy matrix to a Rhino transformation. Useful to transform a DFTransformation-transformation_matrix to a Rhino transformation. 
:param matrix: the numpy matrix @@ -254,4 +255,4 @@ def cvt_ndarray_2_rh_transform(matrix) -> rg.Transform: transfo.M31 = matrix[3, 1] transfo.M32 = matrix[3, 2] transfo.M33 = matrix[3, 3] - return transfo \ No newline at end of file + return transfo diff --git a/src/gh/diffCheck/diffCheck/df_error_estimation.py b/src/gh/diffCheck/diffCheck/df_error_estimation.py index ae2e4520..e9a1d761 100644 --- a/src/gh/diffCheck/diffCheck/df_error_estimation.py +++ b/src/gh/diffCheck/diffCheck/df_error_estimation.py @@ -4,7 +4,7 @@ """ import numpy as np -from diffCheck import diffcheck_bindings +from diffCheck import diffcheck_bindings # type: ignore import Rhino.Geometry as rg @@ -50,14 +50,14 @@ def rh_mesh_2_df_cloud_distance(source, target, signed=False): # make a Df point cloud containing all the vertices of the source rhino mesh df_pcd_from_mesh_vertices = diffcheck_bindings.dfb_geometry.DFPointCloud() df_pcd_from_mesh_vertices.points = [[pt.X, pt.Y, pt.Z] for pt in source.Vertices] - + # calculate the distances distances = np.asarray(df_pcd_from_mesh_vertices.compute_distance(target)) if signed: # build an RTree containing all the points of the target tree = rg.RTree() - for i, ver in enumerate(target.points): + for i, ver in enumerate(target.points): tree.Insert(rg.Point3d(ver[0], ver[1], ver[2]), i) for idx, p in enumerate(source.Vertices): diff --git a/src/gh/diffCheck/diffCheck/df_geometries.py b/src/gh/diffCheck/diffCheck/df_geometries.py index 60c335cd..bed4fc8f 100644 --- a/src/gh/diffCheck/diffCheck/df_geometries.py +++ b/src/gh/diffCheck/diffCheck/df_geometries.py @@ -1,7 +1,10 @@ import os from datetime import datetime from dataclasses import dataclass + import typing +from typing import Optional + import uuid import Rhino @@ -69,7 +72,7 @@ class DFFace: # just as breps a first outer loop and then inner loops of DFVertices all_loops: typing.List[typing.List[DFVertex]] - joint_id: int = None + joint_id: Optional[int] = None def __post_init__(self): if len(self.all_loops[0]) < 3: @@ -101,12 +104,14 @@ def __hash__(self): def __eq__(self, other): if isinstance(other, DFFace): - # check if + # check if return self.all_loops == other.all_loops return False @classmethod - def from_brep_face(cls, brep_face: rg.BrepFace, joint_id: int = None): + def from_brep_face(cls, + brep_face: rg.BrepFace, + joint_id: Optional[int] = None): """ Create a DFFace from a Rhino Brep face @@ -115,6 +120,7 @@ def from_brep_face(cls, brep_face: rg.BrepFace, joint_id: int = None): :return face: The DFFace object """ all_loops = [] + df_face: DFFace = cls([], joint_id) if brep_face.IsCylinder(): cls.is_cylinder = True @@ -122,7 +128,6 @@ def from_brep_face(cls, brep_face: rg.BrepFace, joint_id: int = None): return df_face for idx, loop in enumerate(brep_face.Loops): - loop_trims = loop.Trims loop_curve = loop.To3dCurve() loop_curve = loop_curve.ToNurbsCurve() loop_vertices = loop_curve.Points @@ -147,7 +152,7 @@ def to_brep_face(self): return self._rh_brepface if self.is_cylinder: - ghenv.Component.AddRuntimeMessage( + ghenv.Component.AddRuntimeMessage( # noqa: F821 RML.Warning, "The DFFace was a cylinder created from scratch \n \ , it cannot convert to brep.") @@ -178,7 +183,7 @@ def to_mesh(self): mesh_parts = Rhino.Geometry.Mesh.CreateFromBrep( self.to_brep_face().DuplicateFace(True), Rhino.Geometry.MeshingParameters.QualityRenderMesh) - + for mesh_part in mesh_parts: mesh.Append(mesh_part) mesh.Faces.ConvertQuadsToTriangles() @@ -404,7 +409,6 @@ def to_xml(self): facerhmesh_face_elem.set("v3", str(face.C)) 
facerhmesh_face_elem.set("v4", str(face.D)) - tree = ET.ElementTree(root) xml_string = ET.tostring(root, encoding="unicode") dom = parseString(xml_string) pretty_xml = dom.toprettyxml() diff --git a/src/gh/diffCheck/diffCheck/df_joint_detector.py b/src/gh/diffCheck/diffCheck/df_joint_detector.py index 9601eca7..c5d6d29a 100644 --- a/src/gh/diffCheck/diffCheck/df_joint_detector.py +++ b/src/gh/diffCheck/diffCheck/df_joint_detector.py @@ -2,7 +2,6 @@ import scriptcontext as sc import Rhino.Geometry as rg -import typing from dataclasses import dataclass import diffCheck.df_util @@ -89,4 +88,4 @@ def run(self): self._faces = [(face, face_ids[idx]) for idx, face in enumerate(self.brep.Faces)] - return self._faces \ No newline at end of file + return self._faces diff --git a/src/gh/diffCheck/diffCheck/df_transformations.py b/src/gh/diffCheck/diffCheck/df_transformations.py index e7bca57f..59850a36 100644 --- a/src/gh/diffCheck/diffCheck/df_transformations.py +++ b/src/gh/diffCheck/diffCheck/df_transformations.py @@ -1,6 +1,5 @@ import Rhino import Rhino.Geometry as rg -import scriptcontext as sc import numpy as np import math @@ -11,7 +10,7 @@ def get_inverse_transformation( ) -> Rhino.Geometry.Transform: """ Get the inverse of a transformation - + :param x_form: the transformation to get the inverse from :return: the inverse transformation """ @@ -36,7 +35,7 @@ def get_inverse_transformation( def pln_2_pln_world_transform(brep: Rhino.Geometry.Brep) -> Rhino.Geometry.Transform: """ Transform a brep (beam) to the world plane - + :param brep: the brep to transform :return: the transformation """ @@ -44,7 +43,7 @@ def pln_2_pln_world_transform(brep: Rhino.Geometry.Brep) -> Rhino.Geometry.Trans def _get_lowest_brep_vertex(brep) -> Rhino.Geometry.Point3d: """ Get the the vertex with the lowest y,x and z values - + :param brep: the brep to get the lowest vertex from :return: the lowest vertex """ @@ -63,7 +62,7 @@ def _get_lowest_brep_vertex(brep) -> Rhino.Geometry.Point3d: # find the longest edge of the brep edges = brep.Edges - longest_edge = None + longest_edge: Rhino.Geometry.Curve = None longest_edge_length = 0 for edge in edges: if edge.GetLength() > longest_edge_length: @@ -73,7 +72,7 @@ def _get_lowest_brep_vertex(brep) -> Rhino.Geometry.Point3d: # find biggest face face_indices = longest_edge.AdjacentFaces() faces = [brep.Faces[face_index] for face_index in face_indices] - biggest_face = None + biggest_face: Rhino.Geometry.BrepFace = None biggest_face_area = 0 for face in faces: if rg.AreaMassProperties.Compute(face).Area > biggest_face_area: diff --git a/src/gh/diffCheck/diffCheck/df_util.py b/src/gh/diffCheck/diffCheck/df_util.py index f2c4afd1..2a1b2717 100644 --- a/src/gh/diffCheck/diffCheck/df_util.py +++ b/src/gh/diffCheck/diffCheck/df_util.py @@ -1,6 +1,5 @@ import Rhino import Rhino.Geometry as rg -import rhinoscriptsyntax as rs import scriptcontext as sc import typing @@ -109,7 +108,7 @@ def compute_ordered_vertices(brep_face) -> typing.List[Rhino.Geometry.Point3d]: edges = brep_face.DuplicateEdgeCurves() edges = list(set(edges)) - edges_sorted = [] + edges_sorted: list[Rhino.Geometry.Curve] = [] while len(edges) > 0: if len(edges_sorted) == 0: edges_sorted.append(edges[0]) @@ -139,7 +138,7 @@ def compute_ordered_vertices(brep_face) -> typing.List[Rhino.Geometry.Point3d]: return sorted_vertices def get_doc_2_meters_unitf(): - """ + """ Retrieve the document unit system and get the multiplier factor to be multiplied to all the component's inputs for df functions since they are all 
based in meters. @@ -159,4 +158,4 @@ def get_doc_2_meters_unitf(): unit_scale = 0.3048 elif RhinoDoc.ModelUnitSystem == Rhino.UnitSystem.Yards: unit_scale = 0.9144 - return unit_scale \ No newline at end of file + return unit_scale diff --git a/src/gh/diffCheck/diffCheck/df_visualization.py b/src/gh/diffCheck/diffCheck/df_visualization.py index 681dd82c..45e9dd7a 100644 --- a/src/gh/diffCheck/diffCheck/df_visualization.py +++ b/src/gh/diffCheck/diffCheck/df_visualization.py @@ -154,6 +154,8 @@ def create_legend(min_value, max_value, palette, steps=10, plane=rg.Plane.WorldX height = total_height/steps legend_geometry = [] + rect_pts = [] + previous_color = None for i in range(steps+1): diff --git a/src/gh/diffCheck/setup.py b/src/gh/diffCheck/setup.py index 4997ae7e..34c4cf28 100644 --- a/src/gh/diffCheck/setup.py +++ b/src/gh/diffCheck/setup.py @@ -1,8 +1,4 @@ from setuptools import setup, find_packages -from setuptools import setup, Extension -from setuptools.command.build_ext import build_ext -import sys -import setuptools diff --git a/tasks.py b/tasks.py index 4409190c..a0d8590b 100644 --- a/tasks.py +++ b/tasks.py @@ -1,5 +1,6 @@ # tasks.py from invoke import task +import subprocess PATH_MANIFEST = "./manifest.yml" PATH_LOGO = "./logo.png" @@ -51,11 +52,14 @@ def flagerize(c, package_name="diffCheck"): @task def yakerize(c): path_yakerize = "./invokes/yakerize/yakerize.py" - build_yak_dir = "./build/yak" c.run(f"python {path_yakerize} \ --gh-components-dir {DIR_OUT_GHUER_COMPONENTS} \ --build-dir {DIR_OUT_YAK} \ --manifest-path {PATH_MANIFEST} \ --logo-path {PATH_LOGO} \ --license-path {PATH_LICENSE} \ - --readme-path {PATH_README}") \ No newline at end of file + --readme-path {PATH_README}") + +@task +def documentize(c): + subprocess.run("conda activate diff_check && sphinx-build -b html -v doc _build", shell=True, check=True) diff --git a/tests/integration_tests/pybinds_tests/test_pybind_dll_smoke.py b/tests/integration_tests/pybinds_tests/test_pybind_dll_smoke.py index 5d3525c5..860e0832 100644 --- a/tests/integration_tests/pybinds_tests/test_pybind_dll_smoke.py +++ b/tests/integration_tests/pybinds_tests/test_pybind_dll_smoke.py @@ -21,7 +21,7 @@ sys.exit(1) def test_dfb_test_simple(): - assert dfb.dfb_test.test() == True, "The test function should return True" + assert dfb.dfb_test.test(), "The test function should return True" if __name__ == "__main__": - pytest.main() \ No newline at end of file + pytest.main() diff --git a/tests/integration_tests/pybinds_tests/test_pybind_pyver.py b/tests/integration_tests/pybinds_tests/test_pybind_pyver.py index 5feb3d56..15bde7eb 100644 --- a/tests/integration_tests/pybinds_tests/test_pybind_pyver.py +++ b/tests/integration_tests/pybinds_tests/test_pybind_pyver.py @@ -41,4 +41,4 @@ def test_same_py_ver(): assert dll_py_ver == f"{sys.version_info.major}.{sys.version_info.minor}", "Expected DLL to be built for the same Python version as the current environment" if __name__ == "__main__": - pytest.main() \ No newline at end of file + pytest.main() diff --git a/tests/integration_tests/pybinds_tests/test_pybind_units.py b/tests/integration_tests/pybinds_tests/test_pybind_units.py index 3149c70d..d20ca9ad 100644 --- a/tests/integration_tests/pybinds_tests/test_pybind_units.py +++ b/tests/integration_tests/pybinds_tests/test_pybind_units.py @@ -78,7 +78,7 @@ def test_DFPointCloud_init(): def test_DFPointCloud_load_from_PLY(): pc = dfb.dfb_geometry.DFPointCloud() pc.load_from_PLY(get_ply_cloud_roof_quarter_path()) - + assert pc.points.__len__() == 
7379, "DFPointCloud should have 7379 points" assert pc.normals.__len__() == 7379, "DFPointCloud should have 7379 normals" assert pc.colors.__len__() == 7379, "DFPointCloud should have 7379 colors" @@ -135,16 +135,16 @@ def test_DFPointCloud_properties(create_DFPointCloudSampleRoof): assert pc.get_num_normals() == 7379, "get_num_normals() should return 7379" assert pc.get_num_colors() == 7379, "get_num_colors() should return 7379" - assert pc.has_points() == True, "has_points() should return True" - assert pc.has_colors() == True, "has_colors() should return True" - assert pc.has_normals() == True, "has_normals() should return True" + assert pc.has_points(), "has_points() should return True" + assert pc.has_colors(), "has_colors() should return True" + assert pc.has_normals(), "has_normals() should return True" pc.points = [] pc.normals = [] pc.colors = [] - assert pc.has_points() == False, "has_points() should return False" - assert pc.has_colors() == False, "has_colors() should return False" - assert pc.has_normals() == False, "has_normals() should return False" + assert not pc.has_points(), "has_points() should return False" + assert not pc.has_colors(), "has_colors() should return False" + assert not pc.has_normals(), "has_normals() should return False" def test_DFPointCloud_add_points(): point_pc_1 = [(0, 0, 0)] @@ -271,7 +271,6 @@ def test_DFTransform_init(): assert t is not None, "DFTransformation should be initialized successfully" def test_DFTransform_read_write(create_DFPointCloudSampleRoof): - pc = create_DFPointCloudSampleRoof t = dfb.dfb_transformation.DFTransformation() matrix = t.transformation_matrix @@ -404,4 +403,4 @@ def test_DFPlaneSegmentation_connected_plans(create_DFPointCloudTwoConnectedPlan assert len(segments) == 2, "DFPlaneSegmentation should return 2 segments" if __name__ == "__main__": - pytest.main() \ No newline at end of file + pytest.main() diff --git a/tests/unit_tests/DFPointCloudTest.cc b/tests/unit_tests/DFPointCloudTest.cc index 2e392e3c..a70804bd 100644 --- a/tests/unit_tests/DFPointCloudTest.cc +++ b/tests/unit_tests/DFPointCloudTest.cc @@ -2,6 +2,10 @@ #include "diffCheck.hh" #include "diffCheck/IOManager.hh" +//------------------------------------------------------------------------- +// fixtures +//------------------------------------------------------------------------- + class DFPointCloudTestFixture : public ::testing::Test { protected: std::vector points; @@ -22,6 +26,8 @@ class DFPointCloudTestFixture : public ::testing::Test { } }; +// add your fixtures here.. + //------------------------------------------------------------------------- // basic constructors //------------------------------------------------------------------------- @@ -51,7 +57,6 @@ TEST_F(DFPointCloudTestFixture, LoadFromPLY) { EXPECT_EQ(dfPointCloud.GetNumPoints(), 7379); EXPECT_EQ(dfPointCloud.GetNumColors(), 7379); EXPECT_EQ(dfPointCloud.GetNumNormals(), 7379); - } //-------------------------------------------------------------------------