diff --git a/.github/actions/dependencies/action.yml b/.github/actions/dependencies/action.yml new file mode 100644 index 00000000000..68629b2ccae --- /dev/null +++ b/.github/actions/dependencies/action.yml @@ -0,0 +1,73 @@ +name: 'Dependencies' +description: 'Install dependencies from lockfile' + +inputs: + compiler: + description: 'Compiler to use' + required: false + default: '' + GITHUB_TOKEN: + description: 'Secrets to pass to the action' + required: false + default: '' + spack_version: + description: 'Version of Spack to use' + required: false + default: 'develop' + +runs: + using: 'composite' + steps: + - name: Prerequisites + if: startsWith(runner.os, 'macos') + shell: bash + run: | + brew install ninja ccache + + - name: Restore cache + id: cache-restore + uses: actions/cache/restore@v4 + with: + path: spack_cache + key: spack-r2-${{ inputs.spack_version }}-${{ hashFiles('CI/dependencies/setup_spack.sh') }} + + - name: Extract cached Spack + if: steps.cache-restore.outputs.cache-hit == 'true' + shell: bash + run: | + mv spack_cache/spack . + mv spack_cache/.spack ~ + + - name: Setup Spack from scratch + if: steps.cache-restore.outputs.cache-hit != 'true' + shell: bash + env: + SPACK_VERSION: ${{ inputs.spack_version }} + run: | + CI/dependencies/setup_spack.sh ${PWD}/spack + spack/bin/spack list > /dev/null + rm -rf spack/.git + mkdir spack_cache + cp -r spack spack_cache/ + cp -r ~/.spack spack_cache/ + + - name: Save cache + if: steps.cache-restore.outputs.cache-hit != 'true' + uses: actions/cache/save@v4 + with: + path: | + spack_cache + key: ${{ steps.cache-restore.outputs.cache-primary-key }} + + - name: Install dependencies + shell: bash + env: + GITHUB_TOKEN: ${{ inputs.GITHUB_TOKEN }} + run: | + if [ -n "${{ inputs.compiler }}" ]; then + echo "With compiler" + CI/dependencies/setup.sh -c "${{ inputs.compiler }}" + else + echo "Without compiler" + CI/dependencies/setup.sh + fi diff --git a/.github/workflows/analysis.yml b/.github/workflows/analysis.yml index f33bb927f40..a7cc7444849 100644 --- a/.github/workflows/analysis.yml +++ b/.github/workflows/analysis.yml @@ -22,7 +22,7 @@ env: CCACHE_MAXSIZE: 1.25G CCACHE_KEY_SUFFIX: r2 ACTS_LOG_FAILURE_THRESHOLD: WARNING - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-24.04/deps.v5.tar.zst + DEPENDENCY_TAG: v7_b7 # NOTE this only builds core unittests to reduce the output size. 
if we # found a way to have Github actions not fail regularly with this job @@ -36,7 +36,10 @@ jobs: - uses: actions/checkout@v4 - name: Install dependencies - run: CI/dependencies.sh + uses: ./.github/actions/dependencies + with: + compiler: g++ + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Cache build uses: actions/cache@v4 @@ -53,7 +56,6 @@ jobs: --preset=github-ci -DCMAKE_BUILD_TYPE=Debug -DCMAKE_CXX_FLAGS="-Werror --coverage -g -gz -g1" - -DPython_EXECUTABLE=$(which python3) -DACTS_BUILD_ODD=OFF - name: Build run: cmake --build build @@ -97,7 +99,10 @@ jobs: - uses: actions/checkout@v4 - name: Install dependencies - run: CI/dependencies.sh + uses: ./.github/actions/dependencies + with: + compiler: g++ + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Install dependencies run: pip3 install git+https://github.com/paulgessinger/cmakeperf.git@2a409b5 diff --git a/.github/workflows/builds.yml b/.github/workflows/builds.yml index fa42481f5f6..40f0e7233a8 100644 --- a/.github/workflows/builds.yml +++ b/.github/workflows/builds.yml @@ -20,7 +20,7 @@ env: CCACHE_DIR: ${{ github.workspace }}/ccache CCACHE_MAXSIZE: 500M CCACHE_KEY_SUFFIX: r2 - DEPENDENCY_TAG: v5 + DEPENDENCY_TAG: v7_b7 jobs: linux_ubuntu: @@ -36,15 +36,15 @@ jobs: submodules: true lfs: true - - name: Set dependencies URL - run: echo "DEPENDENCY_URL=https://acts.web.cern.ch/ACTS/ci/ubuntu-24.04/deps.${DEPENDENCY_TAG}.tar.zst" >> $GITHUB_ENV - - name: Install dependencies - run: CI/dependencies.sh + uses: ./.github/actions/dependencies + with: + compiler: g++ + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Restore ccache - uses: actions/cache/restore@v4 id: ccache-restore + uses: actions/cache/restore@v4 with: path: ${{ env.CCACHE_DIR }} key: ccache-${{ runner.os }}-${{ github.job }}-${{ env.CCACHE_KEY_SUFFIX }}-${{ github.sha }} @@ -57,13 +57,10 @@ jobs: # versions # Need to set git user & email for patching to work (GeoModel plugin) run: > - git config --global user.name 'CI' && - git config --global user.email '<>' && ccache -z && cmake -B build -S . 
--preset=github-ci -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" - -DPython_EXECUTABLE=$(which python3) -DACTS_BUILD_PLUGIN_ONNX=ON - name: Build @@ -102,7 +99,6 @@ jobs: -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_FLAGS=-Werror - -DCMAKE_CXX_STANDARD=20 -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" - name: Downstream build @@ -124,11 +120,10 @@ jobs: submodules: true lfs: true - - name: Set dependencies URL - run: echo "DEPENDENCY_URL=https://acts.web.cern.ch/ACTS/ci/ubuntu-24.04/deps.${DEPENDENCY_TAG}.tar.zst" >> $GITHUB_ENV - - name: Install dependencies - run: CI/dependencies.sh + uses: ./.github/actions/dependencies + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: actions/download-artifact@v4 with: @@ -166,11 +161,10 @@ jobs: - run: apt-get update && apt-get install -y time - - name: Set dependencies URL - run: echo "DEPENDENCY_URL=https://acts.web.cern.ch/ACTS/ci/ubuntu-24.04/deps.${DEPENDENCY_TAG}.tar.zst" >> $GITHUB_ENV - - name: Install dependencies - run: CI/dependencies.sh + uses: ./.github/actions/dependencies + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: actions/download-artifact@v4 with: @@ -188,20 +182,25 @@ jobs: - name: Physics performance checks shell: bash - run: > + run: | echo "::group::Dependencies" - && git config --global safe.directory "$GITHUB_WORKSPACE" - && python3 -m pip install histcmp==0.6.8 matplotlib - && python3 -m pip install -r Examples/Scripts/requirements.txt - && geant4-config --install-datasets - && venv_python=$(which python3) - && echo $venv_python - && source build/this_acts_withdeps.sh - && export PATH=$(dirname $venv_python):$PATH - && echo $PATH - && which python3 - && echo "::endgroup::" - && CI/physmon/phys_perf_mon.sh all physmon + git config --global safe.directory "$GITHUB_WORKSPACE" + python3 -m pip install histcmp==0.6.8 matplotlib + python3 -m pip install -r Examples/Scripts/requirements.txt + geant4-config --install-datasets + venv_python=$(which python3) + echo $venv_python + source build/this_acts_withdeps.sh + export PATH=$(dirname $venv_python):$PATH + which python3 + echo $PATH + echo "::endgroup::" + CI/physmon/phys_perf_mon.sh all physmon + + + - uses: mxschmitt/action-tmate@v3 + if: failure() + - name: Post step summary if: always() @@ -212,35 +211,35 @@ jobs: with: name: physmon path: physmon - - linux_physmon_perf_report: - needs: [linux_physmon] - runs-on: ubuntu-latest - if: github.ref == 'refs/heads/main' - steps: - - name: Install dependencies - run: pip3 install spyral-cli==1.1.2 - - - uses: actions/download-artifact@v4 - with: - name: physmon - path: physmon - - - name: Store metrics - env: - SSH_AUTH_SOCK: /tmp/ssh_agent.sock - run: | - ssh-agent -a $SSH_AUTH_SOCK > /dev/null - ssh-add - <<< "${{ secrets.RUNTIME_METRIC_DEPLOY_SSH_KEY }}" - git config --global user.email "action@github.com" - git config --global user.name "GitHub Action" - git clone git@github.com:acts-project/runtime_metrics.git - spyral maxima runtime_metrics/metrics.csv physmon/memory/*.csv -e $(date +%Y-%m-%dT%H-%M-%S) -e ${GITHUB_REF_NAME} -e ${GITHUB_REF} -e ${GITHUB_SHA} - cd runtime_metrics - git add -A - git commit -m"update metrics" - git push - + # + # linux_physmon_perf_report: + # needs: [linux_physmon] + # runs-on: ubuntu-latest + # if: github.ref == 'refs/heads/main' + # steps: + # - name: Install dependencies + # run: pip3 install spyral-cli==1.1.2 + # + # - uses: actions/download-artifact@v4 + # with: + # name: physmon + # path: physmon + # + # - name: Store metrics + # env: + # SSH_AUTH_SOCK: /tmp/ssh_agent.sock + # run: | 
+ # ssh-agent -a $SSH_AUTH_SOCK > /dev/null + # ssh-add - <<< "${{ secrets.RUNTIME_METRIC_DEPLOY_SSH_KEY }}" + # git config --global user.email "action@github.com" + # git config --global user.name "GitHub Action" + # git clone git@github.com:acts-project/runtime_metrics.git + # spyral maxima runtime_metrics/metrics.csv physmon/memory/*.csv -e $(date +%Y-%m-%dT%H-%M-%S) -e ${GITHUB_REF_NAME} -e ${GITHUB_REF} -e ${GITHUB_SHA} + # cd runtime_metrics + # git add -A + # git commit -m"update metrics" + # git push + # linux_ubuntu_extra: runs-on: ubuntu-latest strategy: @@ -263,26 +262,23 @@ jobs: submodules: true lfs: true - - name: Set dependencies URL - run: echo "DEPENDENCY_URL=https://acts.web.cern.ch/ACTS/ci/ubuntu-22.04/deps.${DEPENDENCY_TAG}.tar.zst" >> $GITHUB_ENV - - name: Install dependencies - run: CI/dependencies.sh + uses: ./.github/actions/dependencies + with: + compiler: ${{ matrix.cxx }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Restore ccache uses: actions/cache/restore@v4 id: ccache-restore with: path: ${{ env.CCACHE_DIR }} - key: ccache-${{ runner.os }}-${{ github.job }}-${{ env.CCACHE_KEY_SUFFIX }}-${{ github.sha }} + key: ccache-${{ runner.os }}-${{ github.job }}-${{ env.CCACHE_KEY_SUFFIX }}-${{ matrix.image }}-${{ matrix.std }}-${{ matrix.compiler }}-${{ github.sha }} restore-keys: | - ccache-${{ runner.os }}-${{ github.job }}-${{ env.CCACHE_KEY_SUFFIX }}- + ccache-${{ runner.os }}-${{ github.job }}-${{ env.CCACHE_KEY_SUFFIX }}-${{ matrix.image }}-${{ matrix.std }}-${{ matrix.compiler }}- - name: Configure - # setting CMAKE_CXX_STANDARD=20 is a workaround for a bug in the - # dd4hep CMake configuration that gets triggered on recent CMake - # versions run: > ccache -z && cmake -B build -S . @@ -290,7 +286,6 @@ jobs: -DCMAKE_CXX_COMPILER=${{ matrix.cxx }} -DCMAKE_CXX_STANDARD=${{ matrix.std }} -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" - -DPython_EXECUTABLE=$(which python3) - name: Build run: cmake --build build @@ -321,6 +316,7 @@ jobs: -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_FLAGS=-Werror -DCMAKE_CXX_STANDARD=${{ matrix.std }} + -DCMAKE_CXX_COMPILER=${{ matrix.compiler }} -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" - name: Downstream build @@ -341,16 +337,11 @@ jobs: submodules: true lfs: true - - name: Print architecture - run: uname -p - - - name: Set dependencies URL - run: echo "DEPENDENCY_URL=https://acts.web.cern.ch/ACTS/ci/macos-14/deps.${DEPENDENCY_TAG}.tar.zst" >> $GITHUB_ENV - - name: Install dependencies - run: > - brew install cmake ninja ccache xerces-c - && CI/dependencies.sh + uses: ./.github/actions/dependencies + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + compiler: clang++ - name: Restore ccache uses: actions/cache/restore@v4 @@ -363,12 +354,10 @@ jobs: - name: Configure run: > - ccache -z - && cmake -B build -S . + ccache -z && + cmake -B build -S . 
--preset=github-ci - -DCMAKE_PREFIX_PATH="${{ env.DEPENDENCY_DIR }}" -DCMAKE_INSTALL_PREFIX="${{ env.INSTALL_DIR }}" - -DPython_EXECUTABLE=$(which python3) - name: Build run: cmake --build build @@ -403,7 +392,6 @@ jobs: -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_FLAGS=-Werror - -DCMAKE_CXX_STANDARD=20 -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" - name: Downstream build diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 4acc3968919..4449284b027 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -4,7 +4,10 @@ variables: CCACHE_KEY_SUFFIX: r2 CTEST_OUTPUT_ON_FAILURE: 1 - DEPENDENCY_TAG: v5 + SPACK_VERSION: develop + LOCKFILE_CACHE_DIR: ${CI_PROJECT_DIR}/spack_lockfile_cache + + DEPENDENCY_TAG: v7_b7 .ccache_base: cache: @@ -15,10 +18,30 @@ variables: paths: - ${CCACHE_DIR} +.spack_cache: + cache: + - key: + files: + - CI/dependencies/setup_spack.sh + prefix: spack_${SPACK_VERSION} + paths: + - spack + - .spack + - key: + files: + - spack_lockfile_cache/digest.txt + prefix: spack_lockfile_ + paths: + - spack_lockfile_cache + +.spack_cleanup: + after_script: + - rm -rf spack/.git spack/opt spack/var/spack/cache + - find spack -type f -name "*.pyc" -delete || true clang_tidy: stage: build - image: registry.cern.ch/ghcr.io/acts-project/ubuntu2404:63 + image: registry.cern.ch/ghcr.io/acts-project/ubuntu2404:71 tags: - large artifacts: @@ -26,8 +49,10 @@ clang_tidy: - src/clang-tidy/ when: always expire_in: 1 week - variables: - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-24.04/deps.$DEPENDENCY_TAG.tar.zst + + cache: + - !reference [.spack_cache, cache] + script: - git clone $CLONE_URL src - cd src @@ -39,14 +64,13 @@ clang_tidy: && ln -sf /usr/bin/clang-18 /usr/bin/clang && ln -sf /usr/bin/clang-tidy-18 /usr/bin/clang-tidy - - source CI/dependencies.sh + - source CI/dependencies/setup.sh -c clang++ - > cmake -B build -S . --preset=gitlab-ci-clangtidy -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang - -DPython_EXECUTABLE=$(which python3) # Main clang-tidy run during cmake compilation - CI/clang_tidy/run_clang_tidy.sh clang-tidy build @@ -69,16 +93,18 @@ clang_tidy: # Generate an html report - codereport clang-tidy/clang-tidy.json clang-tidy/html + after_script: + - !reference [.spack_cleanup, after_script] + build_exatrkx_cpu: stage: build - image: registry.cern.ch/ghcr.io/acts-project/ubuntu2204_exatrkx:63 - variables: - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-22.04/deps.$DEPENDENCY_TAG.tar.zst + image: registry.cern.ch/ghcr.io/acts-project/ubuntu2204_exatrkx:71 tags: - large cache: - !reference [.ccache_base, cache] + - !reference [.spack_cache, cache] script: - export PATH=/usr/local/sbin:/usr/sbin:/sbin:$PATH @@ -88,30 +114,31 @@ build_exatrkx_cpu: - git clone $CLONE_URL src - cd src - git checkout $HEAD_SHA - - source CI/dependencies.sh + - source CI/dependencies/setup.sh -c g++ - cd .. 
- mkdir build # Here we only do a minimal build without examples to save resources - > cmake -B build -S src --preset=gitlab-ci-exatrkx - -DPython_EXECUTABLE=$(which python3) -DACTS_EXATRKX_ENABLE_CUDA=OFF - ccache -z - cmake --build build -- -j6 - ccache -s + after_script: + - !reference [.spack_cleanup, after_script] + build_exatrkx: stage: build - image: registry.cern.ch/ghcr.io/acts-project/ubuntu2204_exatrkx:63 - variables: - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-22.04/deps.$DEPENDENCY_TAG.tar.zst + image: registry.cern.ch/ghcr.io/acts-project/ubuntu2204_exatrkx:71 tags: - large cache: - !reference [.ccache_base, cache] + - !reference [.spack_cache, cache] artifacts: paths: @@ -129,54 +156,62 @@ build_exatrkx: - git clone $CLONE_URL src - cd src - git checkout $HEAD_SHA - - source CI/dependencies.sh + - source CI/dependencies/setup.sh -c g++ - cd .. - mkdir build - > cmake -B build -S src --preset=gitlab-ci-exatrkx - -DPython_EXECUTABLE=$(which python3) -DCMAKE_CUDA_ARCHITECTURES="75;86" - ccache -z - cmake --build build -- -j6 - ccache -s + after_script: + - !reference [.spack_cleanup, after_script] + test_exatrkx_unittests: stage: test needs: - build_exatrkx - image: registry.cern.ch/ghcr.io/acts-project/ubuntu2204_exatrkx:63 - variables: - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-22.04/deps.$DEPENDENCY_TAG.tar.zst + image: registry.cern.ch/ghcr.io/acts-project/ubuntu2204_exatrkx:71 tags: - docker-gpu-nvidia + + cache: + - !reference [.spack_cache, cache] script: - apt-get update -y - git clone $CLONE_URL src - cd src - git checkout $HEAD_SHA - - source CI/dependencies.sh + - source CI/dependencies/setup.sh -c g++ - cd .. - ctest --test-dir build -R "(ExaTrkX|ConnectedComponentsCuda)" + after_script: + - !reference [.spack_cleanup, after_script] + test_exatrkx_python: stage: test needs: - build_exatrkx - image: registry.cern.ch/ghcr.io/acts-project/ubuntu2204_exatrkx:63 - variables: - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-22.04/deps.$DEPENDENCY_TAG.tar.zst + image: registry.cern.ch/ghcr.io/acts-project/ubuntu2204_exatrkx:71 tags: - docker-gpu-nvidia + + cache: + - !reference [.spack_cache, cache] + script: - apt-get update -y - git clone $CLONE_URL src - cd src - git checkout $HEAD_SHA - nvidia-smi - - source CI/dependencies.sh + - source CI/dependencies/setup.sh -c g++ - source ../build/this_acts_withdeps.sh - python3 -m pip install -r Examples/Python/tests/requirements.txt - echo $PYTHONPATH @@ -186,13 +221,15 @@ test_exatrkx_python: - pytest -rFsv -k torch --collect-only - pytest -rFsv -k gpu-torch # For now only test torch GPU pipeline + after_script: + - !reference [.spack_cleanup, after_script] + build_gnn_tensorrt: stage: build image: ghcr.io/acts-project/ubuntu2404_tensorrt:74 - variables: - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-24.04/deps.$DEPENDENCY_TAG.tar.zst cache: + - !reference [.spack_cache, cache] - !reference [.ccache_base, cache] tags: @@ -202,7 +239,7 @@ build_gnn_tensorrt: - git clone $CLONE_URL src - cd src - git checkout $HEAD_SHA - - source CI/dependencies.sh + - source CI/dependencies/setup.sh -c g++ - cd .. 
- mkdir build - > @@ -211,21 +248,21 @@ build_gnn_tensorrt: -DACTS_EXATRKX_ENABLE_TORCH=OFF -DACTS_EXATRKX_ENABLE_CUDA=ON -DACTS_EXATRKX_ENABLE_TENSORRT=ON - -DPython_EXECUTABLE=$(which python3) -DCMAKE_CUDA_ARCHITECTURES="75;86" - ccache -z - cmake --build build -- -j6 - ccache -s + after_script: + - !reference [.spack_cleanup, after_script] build_linux_ubuntu: stage: build - image: registry.cern.ch/ghcr.io/acts-project/ubuntu2404:63 - variables: - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-24.04/deps.$DEPENDENCY_TAG.tar.zst + image: registry.cern.ch/ghcr.io/acts-project/ubuntu2404:71 cache: - !reference [.ccache_base, cache] + - !reference [.spack_cache, cache] artifacts: paths: @@ -244,14 +281,15 @@ build_linux_ubuntu: - git checkout $HEAD_SHA - git submodule init - git submodule update - - source CI/dependencies.sh - cd .. + + - source src/CI/dependencies/setup.sh -c g++ + - mkdir build - > cmake -B build -S src --preset=gitlab-ci - -DPython_EXECUTABLE=$(which python3) -DACTS_BUILD_PLUGIN_ONNX=ON - ccache -z @@ -278,13 +316,17 @@ build_linux_ubuntu: # Downstream run - ./build-downstream/bin/ShowActsVersion + after_script: + - !reference [.spack_cleanup, after_script] + linux_test_examples: stage: test - image: registry.cern.ch/ghcr.io/acts-project/ubuntu2404:63 - variables: - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-24.04/deps.$DEPENDENCY_TAG.tar.zst + image: registry.cern.ch/ghcr.io/acts-project/ubuntu2404:71 needs: [build_linux_ubuntu] + cache: + - !reference [.spack_cache, cache] + script: - apt-get update && apt-get install -y git-lfs @@ -293,21 +335,22 @@ linux_test_examples: - git checkout $HEAD_SHA - git submodule init - git submodule update - - source CI/dependencies.sh + - source CI/dependencies/setup.sh -c g++ - cd .. - geant4-config --install-datasets - source build/this_acts_withdeps.sh - cd src - python3 -m pip install -r Examples/Python/tests/requirements.txt - - pytest -rFsv -k "not exatrkx" -v -s + - pytest -rFsv -k "test_full_chain_odd_example_pythia_geant4" -v -s + + after_script: + - !reference [.spack_cleanup, after_script] linux_physmon: stage: test - image: registry.cern.ch/ghcr.io/acts-project/ubuntu2404:63 - variables: - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-24.04/deps.$DEPENDENCY_TAG.tar.zst needs: [build_linux_ubuntu] + image: registry.cern.ch/ghcr.io/acts-project/ubuntu2404:71 artifacts: when: always @@ -315,6 +358,9 @@ linux_physmon: - src/physmon expire_in: 1 week + cache: + - !reference [.spack_cache, cache] + script: - apt-get update && apt-get install -y git-lfs time @@ -323,7 +369,7 @@ linux_physmon: - git checkout $HEAD_SHA - git submodule init - git submodule update - - source CI/dependencies.sh + - source CI/dependencies/setup.sh -c g++ - cd .. - git config --global safe.directory "$GITHUB_WORKSPACE" @@ -339,6 +385,9 @@ linux_physmon: - cd src - CI/physmon/phys_perf_mon.sh all physmon + after_script: + - !reference [.spack_cleanup, after_script] + ############################### ### UBUNTU EXTRA JOB MATRIX ### ############################### @@ -346,12 +395,12 @@ linux_physmon: .linux_ubuntu_extra: variables: INSTALL_DIR: ${CI_PROJECT_DIR}/install - CXX: g++ stage: build cache: - !reference [.ccache_base, cache] + - !reference [.spack_cache, cache] script: - git clone $CLONE_URL src @@ -360,14 +409,13 @@ linux_physmon: - git checkout $HEAD_SHA - git submodule init - git submodule update - - source CI/dependencies.sh + - source CI/dependencies/setup.sh -c ${CXX} - cd .. 
- mkdir build - > cmake -B build -S src --preset=gitlab-ci - -DPython_EXECUTABLE=$(which python3) -DCMAKE_CXX_STANDARD=${CXXSTD} -DCMAKE_CXX_COMPILER=${CXX} @@ -375,11 +423,11 @@ linux_physmon: - cmake --build build -- -j6 - ccache -s - - ctest --test-dir build -j$(nproc) + - ctest --test-dir build #-j$(nproc) - cmake --build build --target integrationtests # Install main project - - cmake --install build + - cmake --install build > install.log # Downstream configure - > @@ -388,6 +436,7 @@ linux_physmon: -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_FLAGS=-Werror -DCMAKE_CXX_STANDARD=${CXXSTD} + -DCMAKE_CXX_COMPILER=${CXX} -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" # Downstream build @@ -396,11 +445,14 @@ linux_physmon: # Downstream run - ./build-downstream/bin/ShowActsVersion + after_script: + - !reference [.spack_cleanup, after_script] + linux_ubuntu_2204: extends: .linux_ubuntu_extra variables: + CXX: g++ CXXSTD: 20 - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-22.04/deps.$DEPENDENCY_TAG.tar.zst image: registry.cern.ch/ghcr.io/acts-project/ubuntu2204:71 linux_ubuntu_2204_clang: @@ -408,7 +460,6 @@ linux_ubuntu_2204_clang: variables: CXX: clang++ CXXSTD: 20 - DEPENDENCY_URL: https://acts.web.cern.ch/ACTS/ci/ubuntu-22.04/deps.$DEPENDENCY_TAG.tar.zst image: registry.cern.ch/ghcr.io/acts-project/ubuntu2204:71 @@ -417,7 +468,7 @@ linux_ubuntu_2204_clang: ###################### .lcg_base_job: - image: registry.cern.ch/ghcr.io/acts-project/${OS}-base:63 + image: registry.cern.ch/ghcr.io/acts-project/${OS}-base:71 stage: build tags: - cvmfs @@ -466,7 +517,7 @@ linux_ubuntu_2204_clang: - cmake --build build -- -j6 - ccache -s - - ctest --test-dir build -j$(nproc) + - ctest --test-dir build #-j$(nproc) lcg_105: diff --git a/CI/dependencies.sh b/CI/dependencies.sh deleted file mode 100755 index 7c8879b499e..00000000000 --- a/CI/dependencies.sh +++ /dev/null @@ -1,103 +0,0 @@ -#!/bin/bash - -function run() { - set -x - "$@" - { set +x; } 2> /dev/null -} - -function set_env { - key="$1" - value="$2" - - echo "=> ${key}=${value}" - - if [ -n "${GITHUB_ACTIONS:-}" ]; then - echo "${key}=${value}" >> $GITHUB_ENV - else - export ${key}=${value} - fi -} - -url=${1:-${DEPENDENCY_URL:-}} - -if [ -n "${GITHUB_ACTIONS:-}" ]; then - destination="${GITHUB_WORKSPACE}/dependencies" -elif [ -n "${GITLAB_CI:-}" ];then - destination="${CI_PROJECT_DIR}/dependencies" -else - destination=${2} -fi - -set_env DEPENDENCY_DIR "${destination}" - -if [ -z "${url}" ]; then - echo "url is not set" - exit 1 -fi - -echo "URL: $url" -echo "DESTINATION: $destination" - -# check curl location -CURL=$(command -v curl) -if [ -z "$CURL" ]; then - echo "curl is not available" - exit 1 -fi - -UNZSTD=$(command -v unzstd) -if [ -z "$UNZSTD" ]; then - echo "unzstd is not available" - exit 1 -fi - -TAR=$(command -v tar) -if [ -z "$TAR" ]; then - echo "tar is not available" - exit 1 -fi - -run mkdir -p "${destination}" - -run $CURL \ - --retry 5 \ - --connect-timeout 2 \ - --location $url \ - | unzstd \ - | tar \ - -x \ - --strip-components=1 \ - --directory "${destination}" - -# Patch up geant4-config data install script -out=$(${destination}/bin/geant4-config --datasets) -line=$(echo "$out" | head -n1) -orig_share=$(echo "$line" | perl -pe 's|.*?(\/.*)\/share.*|\1|') -orig_share_escaped=$(echo $orig_share|perl -pe 's|/|\\/|g') -destination_escaped=$(echo "$destination"|perl -pe 's|/|\\/|g') -perl -pi.bak -e "s/$orig_share_escaped/$destination_escaped/g" ${destination}/bin/geant4-config - -if [ -n "${GITHUB_ACTIONS:-}" ]; then - echo 
"Running in GitHub Actions" - venv="${GITHUB_WORKSPACE}/venv" -fi - -if [ -n "${GITLAB_CI:-}" ];then - echo "Running in GitLab CI" - venv="${CI_PROJECT_DIR}/venv" -fi - -if [ -n "${CI:-}" ];then - run "${destination}/bin/python3" -m venv "${venv}" - run "${venv}/bin/python3" -m pip install pyyaml jinja2 - set_env PATH "${venv}/bin:${destination}/bin/:${PATH}" -fi - -set_env CMAKE_PREFIX_PATH "${destination}" -set_env LD_LIBRARY_PATH "${destination}/lib" -set_env ROOT_INCLUDE_PATH "${destination}/include" -# Geant4 puts CLHEP in a subdirectory -set_env ROOT_INCLUDE_PATH "${destination}/include/Geant4" -# Pythia8 looks for settings in this directory -set_env PYTHIA8DATA "${destination}/share/Pythia8/xmldoc" diff --git a/CI/dependencies/opengl.sh b/CI/dependencies/opengl.sh new file mode 100755 index 00000000000..9e42da472cd --- /dev/null +++ b/CI/dependencies/opengl.sh @@ -0,0 +1,59 @@ +#!/bin/bash +# This file locates the the OpenGL library and adds it to the packages.yaml file. +# OpenGL needs to be supplied to spack as an *external*, which means that spack uses +# it in dependency resolution, but does not attempt to build it. + +set -u +set -e + +packages_file=$(spack location -r)/etc/spack/packages.yaml +echo "Packages file: $packages_file" +stat "$packages_file" || true + +if ! command -v sudo &> /dev/null +then + SUDO="" +else + SUDO="sudo" +fi + +os=$(spack arch --family) + +echo "OS: $os" + +if [[ "$os" == *ubuntu* ]]; then + ${SUDO} apt-get update + ${SUDO} apt-get install -y libgl1-mesa-dev + +if [[ "$os" == *ubuntu24* ]]; then + version="4.6" +elif [[ "$os" == *ubuntu20* ]]; then + version="4.5" +else + echo "Unknown OS version, default OpenGL version" + version="4.5" +fi + +cat < "$packages_file" +packages: + opengl: + buildable: false + externals: + - prefix: /usr/ + spec: opengl@${version} +EOF +cat "$packages_file" +elif [[ "$os" == *almalinux* ]]; then + ${SUDO} dnf install -y mesa-libGLU +cat < "$packages_file" +packages: + opengl: + buildable: false + externals: + - prefix: /usr/ + spec: opengl@4.6 +EOF +cat "$packages_file" +else [[ "$os" == *darwin* ]] + echo "Nothing to do on Darwin" +fi diff --git a/CI/dependencies/parallel.sh b/CI/dependencies/parallel.sh new file mode 100755 index 00000000000..4ec4ece0f1f --- /dev/null +++ b/CI/dependencies/parallel.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +set -euo pipefail + +N=$1 +shift +declare -a pids=() + +# Start all processes in the background +for i in $(seq 1 "$N"); do + # Replace `sleep 10` with the actual command you want to run. + # For demonstration, we are using a command that sleeps for 10 seconds. + # Make sure it runs in the background with '&'. + "$@" & + pids+=($!) +done + +# Wait for all processes to finish, if any fails, kill them all +for pid in "${pids[@]}"; do + if ! wait "$pid"; then + echo "Process $pid failed. Terminating all remaining processes..." + # Kill all started processes + kill "${pids[@]}" 2>/dev/null || true + exit 1 + fi +done + +echo "All processes completed successfully." 
+exit 0 diff --git a/CI/dependencies/select_lockfile.py b/CI/dependencies/select_lockfile.py new file mode 100755 index 00000000000..c73a9ec2314 --- /dev/null +++ b/CI/dependencies/select_lockfile.py @@ -0,0 +1,284 @@ +#!/usr/bin/env python3 + +import os +import argparse +import json +import urllib.request +import urllib.error +import re +import subprocess +import hashlib +import tempfile +from pathlib import Path +from typing import Tuple, Dict, Optional +import contextlib + +# Modify the default cache dir to use a temporary directory +DEFAULT_CACHE_SIZE_LIMIT = 1 * 1024 * 1024 # 1MB + + +def compute_cache_key(url: str) -> str: + """Compute a cache key for a URL""" + return hashlib.sha256(url.encode()).hexdigest() + + +def compute_cache_digest(cache_dir: Path) -> str: + """Compute a digest of all cache files except digest.txt""" + files = sorted( + f + for f in os.listdir(cache_dir) + if (cache_dir / f).is_file() and f != "digest.txt" + ) + + digest = hashlib.sha256() + for fname in files: + fpath = cache_dir / fname + digest.update(fname.encode()) + digest.update(str(fpath.stat().st_size).encode()) + digest.update(fpath.read_bytes()) + return digest.hexdigest() + + +def update_cache_digest(cache_dir: Path): + """Update the cache digest file""" + digest = compute_cache_digest(cache_dir) + (cache_dir / "digest.txt").write_text(digest) + + +def prune_cache(cache_dir: Optional[Path], size_limit: int): + """Prune the cache to keep it under the size limit""" + if cache_dir is None or not cache_dir.exists(): + return + + # Get all cache files with their modification times + cache_files = [ + (cache_dir / f, (cache_dir / f).stat().st_mtime) + for f in os.listdir(cache_dir) + if (cache_dir / f).is_file() + and f != "digest.txt" # Exclude digest from pruning + ] + total_size = sum(f.stat().st_size for f, _ in cache_files) + + if total_size <= size_limit: + return + + # Sort by modification time (oldest first) + cache_files.sort(key=lambda x: x[1]) + + # Remove files until we're under the limit + for file_path, _ in cache_files: + if total_size <= size_limit: + break + total_size -= file_path.stat().st_size + file_path.unlink() + + # Update digest after pruning + update_cache_digest(cache_dir) + + +def fetch_github(base_url: str, cache_dir: Optional[Path], cache_limit: int) -> bytes: + headers = {} + token = os.environ.get("GITHUB_TOKEN") + if token is not None and token != "": + headers["Authorization"] = f"token {token}" + + with contextlib.ExitStack() as stack: + if cache_dir is not None: + cache_dir.mkdir(parents=True, exist_ok=True) + else: + cache_dir = Path(stack.enter_context(tempfile.TemporaryDirectory())) + + # Check cache first + cache_key = compute_cache_key(base_url) + cache_file = cache_dir / cache_key + + if cache_file.exists(): + print("Cache hit on", base_url) + return cache_file.read_bytes() + else: + print("Cache miss on", base_url) + + try: + req = urllib.request.Request(base_url, headers=headers) + with urllib.request.urlopen(req) as response: + content = response.read() + + # Write to cache + cache_file.write_bytes(content) + + # Update digest after adding new file + update_cache_digest(cache_dir) + + # Prune cache if necessary (this will update digest again if pruning occurs) + prune_cache(cache_dir, cache_limit) + + return content + except urllib.error.URLError as e: + print(f"Failed to fetch from {base_url}: {e}") + exit(1) + except json.JSONDecodeError as e: + print(f"Failed to parse JSON response: {e}") + exit(1) + + +def main(): + parser = argparse.ArgumentParser() + 
parser.add_argument("--tag", type=str, required=True, help="Tag to use") + parser.add_argument("--arch", type=str, required=True, help="Architecture to use") + parser.add_argument( + "--compiler-binary", + type=str, + default=os.environ.get("CXX"), + help="Compiler to use (defaults to CXX environment variable if set)", + ) + parser.add_argument( + "--compiler", + type=str, + default=None, + help="Compiler to use (defaults to compiler binary if set)", + ) + parser.add_argument( + "--output", + type=str, + default=None, + help="Output file to write lockfile to", + ) + parser.add_argument( + "--cache-dir", + type=lambda x: Path(x).expanduser() if x else None, + default=os.environ.get("LOCKFILE_CACHE_DIR"), + help="Directory to use for caching (defaults to LOCKFILE_CACHE_DIR env var)", + ) + parser.add_argument( + "--cache-limit", + type=int, + default=int(os.environ.get("LOCKFILE_CACHE_LIMIT", DEFAULT_CACHE_SIZE_LIMIT)), + help="Cache size limit in bytes (defaults to LOCKFILE_CACHE_LIMIT env var)", + ) + args = parser.parse_args() + + print("Fetching lockfiles for tag:", args.tag) + print("Architecture:", args.arch) + + base_url = f"https://api.github.com/repos/acts-project/ci-dependencies/releases/tags/{args.tag}" + + data = json.loads(fetch_github(base_url, args.cache_dir, args.cache_limit)) + + lockfiles = parse_assets(data) + + print("Available lockfiles:") + for arch, compilers in lockfiles.items(): + print(f"> {arch}:") + for c, (n, _) in compilers.items(): + print(f" - {c}: {n}") + + if args.arch not in lockfiles: + print(f"No lockfile found for architecture {args.arch}") + exit(1) + + if args.compiler_binary is not None: + compiler = determine_compiler_version(args.compiler_binary) + print("Compiler:", args.compiler_binary, f"{compiler}") + elif args.compiler is not None: + if not re.match(r"^([\w-]+)@(\d+\.\d+\.\d+)$", args.compiler): + print(f"Invalid compiler format: {args.compiler}") + exit(1) + compiler = args.compiler + print("Compiler:", f"{compiler}") + else: + compiler = None + + lockfile = select_lockfile(lockfiles, args.arch, compiler) + + print("Selected lockfile:", lockfile) + + if args.output: + with open(args.output, "wb") as f: + f.write(fetch_github(lockfile, args.cache_dir, args.cache_limit)) + + +def parse_assets(data: Dict) -> Dict[str, Dict[str, Tuple[str, str]]]: + lockfiles: Dict[str, Dict[str, Tuple[str, str]]] = {} + + for asset in data["assets"]: + url = asset["browser_download_url"] + + name = asset["name"] + if not name.endswith(".lock") or not name.startswith("spack_"): + continue + + m = re.match(r"spack_(.*(?:aarch64|x86_64))(?:_(.*))?\.lock", name) + if m is None: + continue + + arch, compiler = m.groups() + compiler = compiler if compiler else "default" + lockfiles.setdefault(arch, {})[compiler] = (name, url) + + return lockfiles + + +def select_lockfile( + lockfiles: Dict[str, Dict[str, Tuple[str, str]]], arch: str, compiler: Optional[str] +): + # Default to the default lockfile + _, lockfile = lockfiles[arch]["default"] + + if compiler is None: + return lockfile + + # Extract compiler family and version + compiler_family = compiler.split("@")[0] + + # Find all matching compiler families + matching_compilers = { + comp: ver + for comp, ver in lockfiles[arch].items() + if comp != "default" and comp.split("@")[0] == compiler_family + } + + if matching_compilers: + if compiler in matching_compilers: + # Exact match found + _, lockfile = matching_compilers[compiler] + else: + # Find highest version of same compiler family + highest_version = max( + 
matching_compilers.keys(), + key=lambda x: [int(v) for v in x.split("@")[1].split(".")], + ) + _, lockfile = matching_compilers[highest_version] + + return lockfile + + +def determine_compiler_version(binary: str): + try: + result = subprocess.run([binary, "--version"], capture_output=True, text=True) + + line = result.stdout.split("\n", 1)[0] + print(line) + if "clang" in line: + compiler = "clang" + if "Apple" in line: + compiler = "apple-clang" + elif "gcc" in line or "GCC" in line or "g++" in line: + compiler = "gcc" + else: + print(f"Unknown compiler: {binary}") + exit(1) + + m = re.search(r"(\d+\.\d+\.\d+)", line) + if m is None: + print(f"Failed to determine version for compiler: {binary}") + exit(1) + (version,) = m.groups() + return f"{compiler}@{version}" + + except (subprocess.SubprocessError, FileNotFoundError): + print(f"Failed to determine version for compiler: {binary}") + exit(1) + + +if __name__ == "__main__": + main() diff --git a/CI/dependencies/setup.sh b/CI/dependencies/setup.sh new file mode 100755 index 00000000000..438aee639c4 --- /dev/null +++ b/CI/dependencies/setup.sh @@ -0,0 +1,217 @@ +#!/bin/bash +set -e +set -u + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +export SPACK_COLOR=always + +function set_env { + key="$1" + value="$2" + + echo "=> ${key}=${value}" + + if [ -n "${GITHUB_ACTIONS:-}" ]; then + echo "${key}=${value}" >> "$GITHUB_ENV" + else + export "${key}"="${value}" + fi +} + +function start_section() { + local section_name="$1" + if [ -n "${GITHUB_ACTIONS:-}" ]; then + echo "::group::${section_name}" + else + echo "${section_name}" + fi +} + +function end_section() { + if [ -n "${GITHUB_ACTIONS:-}" ]; then + echo "::endgroup::" + fi +} + +# Parse command line arguments +while getopts "c:t:d:h" opt; do + case ${opt} in + c ) + compiler=$OPTARG + ;; + t ) + tag=$OPTARG + ;; + d ) + destination=$OPTARG + ;; + h ) + echo "Usage: $0 [-c compiler] [-t tag] [-d destination]" + echo "Options:" + echo " -c Specify compiler (defaults to CXX env var)" + echo " -t Specify dependency tag (defaults to DEPENDENCY_TAG env var)" + echo " -d Specify install destination (defaults based on CI environment)" + echo " -h Show this help message" + exit 0 + ;; + \? ) + echo "Invalid option: -$OPTARG" 1>&2 + exit 1 + ;; + : ) + echo "Option -$OPTARG requires an argument" 1>&2 + exit 1 + ;; + esac +done + +# Set defaults if not specified +if [ -z "${compiler:-}" ]; then + compiler="${CXX:-default}" +fi + +if [ -z "${tag:-}" ]; then + tag="${DEPENDENCY_TAG:-}" + if [ -z "${tag:-}" ]; then + echo "No tag specified via -t or DEPENDENCY_TAG environment variable" + exit 1 + fi +fi + +if [ -z "${destination:-}" ]; then + if [ -n "${GITHUB_ACTIONS:-}" ]; then + destination="${GITHUB_WORKSPACE}/dependencies" + elif [ -n "${GITLAB_CI:-}" ]; then + destination="${CI_PROJECT_DIR}/dependencies" + else + echo "No destination specified via -d and not running in CI" + exit 1 + fi +fi + + + +echo "Install tag: $tag" +echo "Install destination: $destination" + +mkdir -p ${destination} + +if [ -n "${GITLAB_CI:-}" ]; then + _spack_folder=${CI_PROJECT_DIR}/spack +else + _spack_folder=${PWD}/spack +fi + +start_section "Install spack if not already installed" +if ! 
command -v spack &> /dev/null; then
+    "${SCRIPT_DIR}/setup_spack.sh" "${_spack_folder}"
+    source "${_spack_folder}/share/spack/setup-env.sh"
+fi
+end_section
+
+if [ -n "${GITLAB_CI:-}" ]; then
+    # Use the project spack config for GitLab CI so we can cache it
+    mkdir -p ${CI_PROJECT_DIR}/.spack
+    ln -s ${CI_PROJECT_DIR}/.spack ${HOME}/.spack
+fi
+
+
+
+if [ -n "${CI:-}" ]; then
+    start_section "Add buildcache mirror"
+    mirror_name="acts-spack-buildcache"
+    mirror_url="oci://ghcr.io/acts-project/spack-buildcache"
+    if [ -n "${GITLAB_CI:-}" ]; then
+        # Use CERN mirror for non-Github Actions
+        mirror_url="oci://registry.cern.ch/ghcr.io/acts-project/spack-buildcache"
+    fi
+
+    # Check if this buildcache is already configured
+    if ! spack mirror list | grep -q ${mirror_name}; then
+        echo "Adding buildcache ${mirror_name}"
+        spack mirror add ${mirror_name} ${mirror_url} --unsigned
+    fi
+    end_section
+
+    start_section "Locate OpenGL"
+    "${SCRIPT_DIR}/opengl.sh"
+    end_section
+fi
+
+start_section "Get spack lock file"
+arch=$(spack arch --family)
+
+env_dir="${destination}/env"
+view_dir="${destination}/view"
+mkdir -p ${env_dir}
+
+lock_file_path="${destination}/spack.lock"
+cmd=(
+    "${SCRIPT_DIR}/select_lockfile.py"
+    "--tag" "${tag}"
+    "--arch" "${arch}"
+    "--output" "${lock_file_path}"
+)
+
+if [ "${compiler}" != "default" ]; then
+    cmd+=("--compiler-binary" "${compiler}")
+fi
+
+"${cmd[@]}"
+
+end_section
+
+
+
+start_section "Create spack environment"
+time spack env create -d "${env_dir}" "${lock_file_path}" --with-view "$view_dir"
+time spack -e "${env_dir}" spec -l
+time spack -e "${env_dir}" find
+end_section
+
+start_section "Install spack packages"
+NCPUS=4 # use a fixed, low-ish number to avoid deadlocks
+time "${SCRIPT_DIR}"/parallel.sh "$NCPUS" spack -e "${env_dir}" install --use-buildcache only \
+    | tee install.log \
+    | grep -v "^Waiting\|^\[+\]"
+end_section
+
+start_section "Patch up Geant4 data directory"
+# ${SCRIPT_DIR}/with_spack_env.sh ${env_dir} geant4-config --install-datasets
+geant4_dir=$(spack -e "${env_dir}" location -i geant4)
+# Prepare the folder for G4 data, and symlink it to where G4 will look for it
+mkdir -p "${geant4_dir}/share/Geant4"
+ln -s "${geant4_dir}/share/Geant4/data" "${view_dir}/share/Geant4/data"
+end_section
+
+
+start_section "Prepare python environment"
+ls -al
+venv_dir="${view_dir}/venv"
+"${view_dir}"/bin/python3 -m venv \
+    --system-site-packages \
+    "$venv_dir"
+
+"${venv_dir}/bin/python3" -m pip install pyyaml jinja2
+
+end_section
+
+start_section "Set environment variables"
+if [ -n "${GITHUB_ACTIONS:-}" ]; then
+    echo "${view_dir}/bin" >> "$GITHUB_PATH"
+    echo "${venv_dir}/bin" >> "$GITHUB_PATH"
+fi
+set_env PATH "${venv_dir}/bin:${view_dir}/bin/:${PATH}"
+set_env CMAKE_PREFIX_PATH "${venv_dir}:${view_dir}"
+set_env LD_LIBRARY_PATH "${view_dir}/lib"
+set_env ROOT_INCLUDE_PATH "${view_dir}/include"
+# Geant4 puts CLHEP in a subdirectory
+set_env ROOT_INCLUDE_PATH "${view_dir}/include/Geant4"
+end_section
+
+# Pythia8 looks for settings in this directory
+# set_env PYTHIA8DATA "${destination}/share/Pythia8/xmldoc"
+
+set +u
+set +e
diff --git a/CI/dependencies/setup_spack.sh b/CI/dependencies/setup_spack.sh
new file mode 100755
index 00000000000..4339644e2bd
--- /dev/null
+++ b/CI/dependencies/setup_spack.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+set -e
+set -u
+
+_spack_version=${SPACK_VERSION:-develop}
+
+_spack_folder=$1
+
+if [ ! -d "${_spack_folder}" ]; then
+    echo "Cloning spack"
+    git clone --branch ${_spack_version} -c feature.manyFiles=true https://github.com/spack/spack.git ${_spack_folder}
+    pushd ${_spack_folder} > /dev/null
+    git config user.name 'CI'
+    git config user.email '<>'
+    popd > /dev/null
+fi
diff --git a/Core/include/Acts/EventData/VectorMultiTrajectory.hpp b/Core/include/Acts/EventData/VectorMultiTrajectory.hpp
index 4238afd88f6..afa7277fffa 100644
--- a/Core/include/Acts/EventData/VectorMultiTrajectory.hpp
+++ b/Core/include/Acts/EventData/VectorMultiTrajectory.hpp
@@ -500,7 +500,7 @@ class VectorMultiTrajectory final
              Concepts::eigen_bases_have_same_num_rows<val_t, cov_t> &&
              Concepts::eigen_base_is_square<cov_t> &&
              Eigen::PlainObjectBase<val_t>::RowsAtCompileTime <=
-                 static_cast<std::underlying_type_t<BoundIndices>>(eBoundSize))
+                 toUnderlying(eBoundSize))
   {
     constexpr std::size_t measdim = val_t::RowsAtCompileTime;
diff --git a/Core/include/Acts/Utilities/EigenConcepts.hpp b/Core/include/Acts/Utilities/EigenConcepts.hpp
index 4151dd6e237..77718dda92f 100644
--- a/Core/include/Acts/Utilities/EigenConcepts.hpp
+++ b/Core/include/Acts/Utilities/EigenConcepts.hpp
@@ -37,16 +37,16 @@ concept eigen_base_is_square = eigen_base_is_fixed_size<T> &&
 template <typename T1, typename T2>
 concept eigen_bases_have_same_num_rows =
     eigen_base_is_fixed_size<T1> && eigen_base_is_fixed_size<T2> &&
-    static_cast<std::size_t>(Eigen::PlainObjectBase<T1>::RowsAtCompileTime) ==
-    static_cast<std::size_t>(Eigen::PlainObjectBase<T2>::RowsAtCompileTime);
+    toUnderlying(Eigen::PlainObjectBase<T1>::RowsAtCompileTime) ==
+    toUnderlying(Eigen::PlainObjectBase<T2>::RowsAtCompileTime);
 
 /// @brief Concept that is true iff T1 and T2 have the same, known at compile
 /// time, number of columns.
 template <typename T1, typename T2>
 concept eigen_bases_have_same_num_cols =
     eigen_base_is_fixed_size<T1> && eigen_base_is_fixed_size<T2> &&
-    static_cast<std::size_t>(Eigen::PlainObjectBase<T1>::ColsAtCompileTime) ==
-    static_cast<std::size_t>(Eigen::PlainObjectBase<T2>::ColsAtCompileTime);
+    toUnderlying(Eigen::PlainObjectBase<T1>::ColsAtCompileTime) ==
+    toUnderlying(Eigen::PlainObjectBase<T2>::ColsAtCompileTime);
 
 /// @brief Concept that is true iff T1 and T2 have the same, known at compile
 /// time, size.
diff --git a/Core/include/Acts/Visualization/Interpolation3D.hpp b/Core/include/Acts/Visualization/Interpolation3D.hpp
index ea8e649869e..96298701591 100644
--- a/Core/include/Acts/Visualization/Interpolation3D.hpp
+++ b/Core/include/Acts/Visualization/Interpolation3D.hpp
@@ -59,8 +59,13 @@ trajectory_type spline(const trajectory_type& inputsRaw, std::size_t nPoints,
   for (std::size_t i = 0; i < inputs.size(); ++i) {
     points.col(i) = inputs[i].transpose();
   }
+
+  // MARK: fpeMaskBegin(FLTDIV, 1, #4024)
+  // MARK: fpeMaskBegin(FLTINV, 1, #4024)
   Eigen::Spline<double, 3> spline3D =
       Eigen::SplineFitting<Eigen::Spline<double, 3>>::Interpolate(points, 2);
+  // MARK: fpeMaskEnd(FLTDIV)
+  // MARK: fpeMaskEnd(FLTINV)
   double step = 1.
/ (nPoints - 1); for (std::size_t i = 0; i < nPoints; ++i) { diff --git a/Examples/Algorithms/Geant4HepMC/src/EventRecording.cpp b/Examples/Algorithms/Geant4HepMC/src/EventRecording.cpp index 51826af837a..55da5d9c7a7 100644 --- a/Examples/Algorithms/Geant4HepMC/src/EventRecording.cpp +++ b/Examples/Algorithms/Geant4HepMC/src/EventRecording.cpp @@ -81,12 +81,16 @@ ProcessCode EventRecording::execute(const AlgorithmContext& context) const { part); // Begin with the simulation - m_runManager->BeamOn(1); + // MARK: fpeMaskBegin(FLTINV, 1, #4021) + m_runManager->BeamOn(1); // this is where the actual FPE occurs // Test if the event was aborted - if (Geant4::HepMC3::SteppingAction::instance()->eventAborted()) { + if (Geant4::HepMC3::SteppingAction::instance() + ->eventAborted()) { // this is where the FPE is attributed to due + // to inlining continue; } + // MARK: fpeMaskEnd(FLTINV) // Set event start time HepMC3::GenEvent event = Geant4::HepMC3::EventAction::instance()->event(); diff --git a/Examples/Python/tests/root_file_hashes.txt b/Examples/Python/tests/root_file_hashes.txt index 25f1f003598..ebbe0f6db4c 100644 --- a/Examples/Python/tests/root_file_hashes.txt +++ b/Examples/Python/tests/root_file_hashes.txt @@ -1,8 +1,8 @@ test_pythia8__particles.root: 125182a9647ef3cec71afbc1b9e1676e40c13826c8333d6704345dd5133d5e91 test_fatras__particles_simulation.root: bc970873fef0c2efd86ed5413623802353d2cd04abea72de14e8cdfc0e40076f test_fatras__hits.root: 6e4beb045fa1712c4d14c280ba33c3fa13e4aff9de88d55c3e32f62ad226f724 -test_geant4__particles_simulation.root: 49926c71a9b54e13aa1cc7596d3302baf3c87d8e2c1d0267cb4523f6abdc0ac2 -test_geant4__hits.root: 4c9e704a75f47ed2e61652679a1d6f18fa4d9cf53faa8f8f5bbf7995634207aa +test_geant4__particles_simulation.root: fd10da8000715f7f515001670e713efd8df21501b32e9b2635f0152d63eecaf4 +test_geant4__hits.root: d8efffea763de487454c23c48bde26548df59f2342807b3b486eb98bdc6a1559 test_seeding__estimatedparams.root: 6759004f945cabe03098c94b3eea7e3323acd9f37edfa71641797007336643c8 test_seeding__performance_seeding.root: 992f9c611d30dde0d3f3ab676bab19ada61ab6a4442828e27b65ec5e5b7a2880 test_seeding__particles.root: c423bc666df3674f1a1140dec68ea13f44173232b8057e8a02572aee4f3e7d5b diff --git a/cmake/setup_withdeps.sh.in b/cmake/setup_withdeps.sh.in index ce1dc3506e7..badf09eafb9 100644 --- a/cmake/setup_withdeps.sh.in +++ b/cmake/setup_withdeps.sh.in @@ -44,7 +44,7 @@ export DYLD_LIBRARY_PATH="$script_dir/lib:${DYLD_LIBRARY_PATH}" # activate dependencies if present if [[ -d "@ROOT_DIR@" ]]; then - . @ROOT_BINDIR@/thisroot.sh + . @ROOT_DIR@/../../../bin/thisroot.sh fi if [[ -d "@Geant4_DIR@" ]]; then . @Geant4_INCLUDE_DIR@/../../bin/geant4.sh diff --git a/tasks.md b/tasks.md new file mode 100644 index 00000000000..51f9c35da52 --- /dev/null +++ b/tasks.md @@ -0,0 +1,170 @@ +# Task: Fix Non-explicit Constructors + +This file tracks the progress of making constructors explicit across the ACTS codebase to prevent unintended implicit conversions. The task was initiated based on clang-tidy warnings about single-argument constructors that should be marked explicit. + +The output of the clang-tidy run is in `clang_tidy.txt`, for reference. +The build command is `cmake --build build`. The compilation must be checked after each change is made using this command. + +Some constructors cannot be made explicit due to existing code that relies on implicit conversions. In this case, the necessary changes to other files need to be made, until the compilation is successful. 
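+
+To make the expected change concrete, here is a minimal, hypothetical sketch of the pattern (the class and member names are illustrative only, not taken from the ACTS codebase): the constructor is marked `explicit`, and any call site that relied on the implicit conversion is rewritten to construct the object directly.
+
+```cpp
+#include <vector>
+
+struct Config {
+  double tolerance = 0.1;
+};
+
+struct Finder {
+  // Before: `Finder(const Config& cfg)` allowed `Finder f = cfg;`.
+  // Marking the constructor explicit forbids that implicit conversion.
+  explicit Finder(const Config& cfg) : config(cfg) {}
+  Config config;
+};
+
+int main() {
+  Config cfg;
+  Finder finder{cfg};          // was: Finder finder = cfg;
+  std::vector<Finder> finders;
+  finders.emplace_back(cfg);   // emplace_back still works: it direct-initializes
+  return finder.config.tolerance > 0 ? 0 : 1;
+}
+```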
+
+Every time an item is completed, the corresponding todo item should be marked as such.
+
+Procedure:
+- Identify the next item in the todo list.
+- Make the change to the code.
+- Compile the code using `cmake --build build`. The compilation MUST be checked and MUST be successful before moving on.
+- If the compilation was successful, mark the item as completed.
+
+
+Some extra notes:
+- NEVER update more than one item at once, and NEVER mark more than one item as completed at once.
+- ONLY EVER skip an item and make it implicit again if there is an existing comment explaining why it cannot be explicit.
+- Don't prompt for feedback if there were no errors or if you were able to fix the compilation errors.
+
+
+---
+
+# Non-explicit Constructor Fixes Required
+
+## Core/Utilities
+- [x] `Acts::ValueHolder` constructor in `Holders.hpp:55` (skipped due to possible SEGFAULT)
+
+## Core/Vertexing
+- [x] `Acts::AdaptiveMultiVertexFinder` constructor in `AdaptiveMultiVertexFinder.hpp:172`
+- [x] `Acts::NumericalTrackLinearizer::Config` constructor in `NumericalTrackLinearizer.hpp:73`
+- [x] `Acts::NumericalTrackLinearizer` constructor in `NumericalTrackLinearizer.hpp:93`
+- [x] `Acts::GaussianTrackDensity::Config` constructor in `GaussianTrackDensity.hpp:62`
+- [x] `Acts::GaussianTrackDensity::State` constructor in `GaussianTrackDensity.hpp:87`
+- [x] `Acts::GaussianTrackDensity` constructor in `GaussianTrackDensity.hpp:93`
+- [x] `Acts::GaussianTrackDensity::GaussianTrackDensityStore` constructor in `GaussianTrackDensity.hpp:178`
+- [x] `Acts::FullBilloirVertexFitter` constructor in `FullBilloirVertexFitter.hpp:64`
+- [x] `Acts::AdaptiveMultiVertexFitter::Config` constructor in `AdaptiveMultiVertexFitter.hpp:101`
+- [x] `Acts::AdaptiveMultiVertexFitter` constructor in `AdaptiveMultiVertexFitter.hpp:148`
+- [x] `Acts::SingleSeedVertexFinder` constructor in `SingleSeedVertexFinder.hpp:102`
+- [x] `Acts::IterativeVertexFinder` constructor in `IterativeVertexFinder.hpp:138`
+- [x] `Acts::HelicalTrackLinearizer` constructor in `HelicalTrackLinearizer.hpp:63`
+- [x] `Acts::ZScanVertexFinder::Config` constructor in `ZScanVertexFinder.hpp:40`
+- [x] `Acts::ZScanVertexFinder` constructor in `ZScanVertexFinder.hpp:75`
+- [x] `Acts::GaussianGridTrackDensity::Config` constructor in `GaussianGridTrackDensity.hpp:41`
+- [x] `Acts::GaussianGridTrackDensity` constructor in `GaussianGridTrackDensity.hpp:82`
+- [x] `Acts::BilloirTrack` constructor in `FullBilloirVertexFitter.cpp:24`
+
+## Core/EventData
+- [x] `Acts::SinglyChargedParticleHypothesis` constructor in `ParticleHypothesis.hpp:35`
+- [x] `Acts::NeutralParticleHypothesis` constructor in `ParticleHypothesis.hpp:78`
+- [x] `Acts::NonNeutralChargedParticleHypothesis` constructor in `ParticleHypothesis.hpp:110`
+- [x] `Acts::ParticleHypothesis` constructor in `ParticleHypothesis.hpp:157`
+- [x] `Acts::TrackStateRange` constructor in `MultiTrajectory.hpp:111`
+- [x] `Acts::ProxyAccessorBase` constructors in `ProxyAccessor.hpp:73,77`
+- [x] `Acts::VectorTrackContainer` constructor in `VectorTrackContainer.hpp:207`
+- [x] `Acts::ConstVectorTrackContainer` constructors in `VectorTrackContainer.hpp:285,291`
+- [x] `Acts::GenericParticleHypothesis` constructors in `GenericParticleHypothesis.hpp:48,60`
+- [x] `Acts::CorrectedFreeToBoundTransformer` constructor in `CorrectedTransformationFreeToBound.hpp:75`
+- [x] `Acts::DynamicKeyIterator` constructor in `DynamicKeyIterator.hpp:29`
+- [x] `Acts::GenericFreeTrackParameters` constructor in
`GenericFreeTrackParameters.hpp:114` +- [x] `Acts::TrackStateType` constructor in `TrackStateType.hpp:49` +- [x] `Acts::ConstTrackStateType` constructor in `TrackStateType.hpp:117` +- [x] `Acts::TrackProxy` constructor in `TrackProxy.hpp:117` (skipped: this is a copy constructor from mutable to const, where implicit conversion is expected and safe) +- [x] `Acts::ConstVectorMultiTrajectory` constructors in `VectorMultiTrajectory.hpp:572,575` +- [x] `Acts::TransitiveConstPointer` constructors in `TrackStateProxy.hpp:45,48` +- [x] `Acts::TrackStateProxy` constructor in `TrackStateProxy.hpp:220` + +## Core/Detector +- [x] `Acts::GeometryIdGenerator` constructor in `GeometryIdGenerator.hpp:73` +- [x] `Acts::ChainedGeometryIdGenerator` constructor in `GeometryIdGenerator.hpp:145` +- [x] `Acts::GeometryIdMapper` constructor in `GeometryIdMapper.hpp:55` +- [x] `Acts::Portal` constructor in `Portal.hpp:50` +- [x] `Acts::MultiWireStructureBuilder` constructor in `MultiWireStructureBuilder.hpp:54` +- [x] `Acts::DetectorVolumeBuilder` constructor in `DetectorVolumeBuilder.hpp:57` +- [x] `Acts::VolumeStructureBuilder` constructor in `VolumeStructureBuilder.hpp:56` +- [x] `Acts::IndexedRootVolumeFinderBuilder` constructor in `IndexedRootVolumeFinderBuilder.hpp:29` +- [x] `Acts::CylindricalContainerBuilder` constructors in `CylindricalContainerBuilder.hpp:70,89` +- [x] `Acts::DetectorVolume::ObjectStore` constructor in `DetectorVolume.hpp:87` +- [x] `Acts::DetectorBuilder` constructor in `DetectorBuilder.hpp:53` +- [x] `Acts::LayerStructureBuilder::SurfacesHolder` constructor in `LayerStructureBuilder.hpp:61` +- [x] `Acts::LayerStructureBuilder` constructor in `LayerStructureBuilder.hpp:105` +- [x] `Acts::CuboidalContainerBuilder` constructors in `CuboidalContainerBuilder.hpp:68,87` +- [x] `Acts::MultiWireInternalStructureBuilder` constructor in `MultiWireStructureBuilder.cpp:52` + +## Core/TrackFinding +- [x] `Acts::GbtsConnector` constructor in `GbtsConnector.hpp:40` +- [x] `Acts::TrackSelector::EtaBinnedConfig` constructors in `TrackSelector.hpp:162,168,175` +- [x] `Acts::TrackSelector` constructors in `TrackSelector.hpp:220,224` +- [x] `Acts::CombinatorialKalmanFilter` constructor in `CombinatorialKalmanFilter.hpp:288` + +## Core/Surfaces +- [x] `Acts::CylinderBounds` constructor in `CylinderBounds.hpp:79` +- [x] `Acts::LineBounds` constructor in `LineBounds.hpp:39` +- [x] `Acts::DiscSurface` constructor in `DiscSurface.hpp:85` +- [x] `Acts::SingleElementLookup` constructors in `SurfaceArray.hpp:360,365` +- [x] `Acts::SurfaceArray` constructor in `SurfaceArray.hpp:446` +- [x] `Acts::AnnulusBounds` constructor in `AnnulusBounds.hpp:67` +- [x] `Acts::TrapezoidBounds` constructor in `TrapezoidBounds.hpp:52` +- [x] `Acts::ConvexPolygonBounds` constructors in `ConvexPolygonBounds.hpp:86,91,96,145` +- [x] `Acts::StrawSurface` constructor in `StrawSurface.hpp:52` +- [x] `Acts::DiscTrapezoidBounds` constructor in `DiscTrapezoidBounds.hpp:58` +- [x] `Acts::RadialBounds` constructor in `RadialBounds.hpp:54` +- [x] `Acts::LineSurface` constructor in `LineSurface.hpp:58` +- [x] `Acts::EllipseBounds` constructor in `EllipseBounds.hpp:64` +- [x] `Acts::RectangleBounds` constructor in `RectangleBounds.hpp:51` +- [x] `Acts::DiamondBounds` constructor in `DiamondBounds.hpp:61` +- [x] `Acts::PlaneSurface` constructor in `PlaneSurface.hpp:69` +- [x] `Acts::ConeBounds` constructor in `ConeBounds.hpp:72` +- [x] `Acts::PerigeeSurface` constructors in `PerigeeSurface.hpp:37,42` + +## Core/TrackFitting +- [x] 
`Acts::KalmanFitter` constructor in `KalmanFitter.hpp:271` +- [x] `Acts::Gx2Fitter` constructor in `GlobalChiSquareFitter.hpp:691` + +## Core/Geometry +- [x] `Acts::GlueVolumesDescriptor` constructor in `GlueVolumesDescriptor.hpp:41` +- [x] `Acts::SurfaceBinningMatcher` constructor in `SurfaceBinningMatcher.hpp:28` +- [x] `Acts::TrapezoidVolumeBounds` constructor in `TrapezoidVolumeBounds.hpp:87` +- [x] `Acts::ProtoLayerHelper` constructor in `ProtoLayerHelper.hpp:40` +- [x] `Acts::KDTreeTrackingGeometryBuilder` constructor in `KDTreeTrackingGeometryBuilder.hpp:67` +- [x] `Acts::CuboidVolumeBuilder` constructor in `CuboidVolumeBuilder.hpp:127` +- [x] `Acts::SurfaceArrayCreator` constructors in `SurfaceArrayCreator.hpp:98,105` +- [x] `Acts::PassiveLayerBuilder` constructor in `PassiveLayerBuilder.hpp:57` +- [x] `Acts::ConeVolumeBounds` constructor in `ConeVolumeBounds.hpp:81` +- [x] `Acts::TrackingVolumeArrayCreator` constructor in `TrackingVolumeArrayCreator.hpp:38` +- [x] `Acts::Layer` constructor in `Layer.hpp:221` +- [ ] `Acts::CylinderVolumeHelper` constructor in `CylinderVolumeHelper.hpp:59` +- [ ] `Acts::TrackingGeometry` constructor in `TrackingGeometry.hpp:51` +- [ ] `Acts::GenericApproachDescriptor` constructor in `GenericApproachDescriptor.hpp:38` +- [ ] `Acts::GeometryHierarchyMap` constructor in `GeometryHierarchyMap.hpp:72` +- [ ] `Acts::GenericCuboidVolumeBounds` constructors in `GenericCuboidVolumeBounds.hpp:42,48` +- [ ] `Acts::CylinderVolumeBuilder` constructor in `CylinderVolumeBuilder.hpp:512` +- [ ] `Acts::LayerCreator` constructor in `LayerCreator.hpp:62` +- [ ] `Acts::TrackingVolume` constructor in `TrackingVolume.hpp:156` +- [ ] `Acts::TrackingGeometryBuilder` constructor in `TrackingGeometryBuilder.hpp:62` + +## Core/MagneticField +- [ ] `Acts::ConstantBField::Cache` constructor in `ConstantBField.hpp:26` +- [ ] `Acts::InterpolatedBFieldMap::Cache` constructor in `InterpolatedBFieldMap.hpp:148` +- [ ] `Acts::InterpolatedBFieldMap` constructor in `InterpolatedBFieldMap.hpp:177` +- [ ] `Acts::NullBField::Cache` constructor in `NullBField.hpp:23` +- [ ] `Acts::SolenoidBField::Cache` constructor in `SolenoidBField.hpp:73` +- [ ] `Acts::SolenoidBField` constructor in `SolenoidBField.hpp:93` + +## Core/Material +- [x] `Acts::ISurfaceMaterial` constructor in `ISurfaceMaterial.hpp:38` +- [x] `Acts::MaterialComposition` constructor in `MaterialComposition.hpp:101` +- [x] `Acts::ProtoSurfaceMaterialT` constructor in `ProtoSurfaceMaterial.hpp:39` +- [x] `Acts::BinnedSurfaceMaterialAccumulater` constructor in `BinnedSurfaceMaterialAccumulater.hpp:48` +- [ ] `Acts::InteractionVolume` constructors in `MaterialInteraction.hpp:35,39` +- [ ] `Acts::HomogeneousVolumeMaterial` constructor in `HomogeneousVolumeMaterial.hpp:28` +- [ ] `Acts::HomogeneousSurfaceMaterial` constructor in `HomogeneousSurfaceMaterial.hpp:33` +- [ ] `Acts::MaterialMapper` constructor in `MaterialMapper.hpp:63` +- [ ] `Acts::MaterialValidater` constructor in `MaterialValidater.hpp:38` +- [ ] `Acts::AccumulatedSurfaceMaterial` constructors in `AccumulatedSurfaceMaterial.hpp:40,52` +- [ ] `Acts::ProtoVolumeMaterial` constructor in `ProtoVolumeMaterial.hpp:38` +- [ ] `Acts::PropagatorMaterialAssigner` constructor in `PropagatorMaterialAssigner.hpp:101` +- [ ] `Acts::InterpolatedMaterialMap` constructor in `InterpolatedMaterialMap.hpp:250` +- [ ] `Acts::IntersectionMaterialAssigner` constructor in `IntersectionMaterialAssigner.hpp:50` + +## Core/Clusterization +- [ ] `Acts::TimedConnect` constructor in 
`TimedClusterization.hpp:28` + +## Conversion Operators to Fix +- [ ] `Acts::CorrectedTransformationFreeToBound::operator bool()` in `CorrectedTransformationFreeToBound.hpp:53` +- [ ] `Acts::MaterialComposition::operator bool()` in `MaterialComposition.hpp:128`
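+
+For the conversion operators, the fix follows the same pattern as the constructors. A minimal, hypothetical sketch (the class name is illustrative, not the actual ACTS code): marking `operator bool()` explicit keeps contextual conversions in conditions working while forbidding silent conversions elsewhere.
+
+```cpp
+struct MaterialSlabLike {
+  double thickness = 0.0;
+
+  // `explicit operator bool()` still permits contextual conversion
+  // (if, while, !, &&), but blocks implicit conversions to bool.
+  explicit operator bool() const { return thickness > 0.0; }
+};
+
+int main() {
+  MaterialSlabLike slab{1.5};
+  if (slab) {
+    // contextual conversion: still allowed with `explicit`
+  }
+  // bool b = slab;                  // would no longer compile
+  bool b = static_cast<bool>(slab);  // explicit conversion required
+  return b ? 0 : 1;
+}
+```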