Merge pull request #1059 from luxonis/sync_all_cameras #3109

Workflow file for this run

name: Depthai Python CI/CD
# Controls when the action will run. Triggers the workflow on push events and on manual dispatch.
###################################
# WARNING #########################
# Self-hosted runners are used, and GitHub has not yet settled a security policy for them,
# so take care not to enable workflows on pull_request events.
# TLDR: do NOT add 'pull_request' here for the time being
on:
  workflow_dispatch:
  push:
    branches:
      - main
      - develop
      - gen2
      - gen2_develop
    tags:
      - 'v*'
###################################
###################################
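# Since workflow_dispatch is enabled above, a run can also be started by hand. A minimal sketch,
# assuming the GitHub CLI (gh) is installed and authenticated against this repository; the branch
# name is only an example:
#   gh workflow run "Depthai Python CI/CD" --ref develop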
env:
  CMAKE_WINDOWS_SDK_VERSION: '10.0.18362.0'
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
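  # Job graph at a glance: build-docstrings runs first and publishes a 'docstrings' artifact;
  # the pytest job and every wheel-building job consume it. release waits on all builds, deploy
  # waits on release, and notify_hil_workflow dispatches the hardware-in-the-loop tests once the
  # Linux wheels exist.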
  # Job which builds docstrings for the rest of the wheel builds
  build-docstrings:
    runs-on: ubuntu-latest
    steps:
      - name: Cache .hunter folder
        uses: actions/cache@v3
        with:
          path: ~/.hunter
          key: hunter-ubuntu-latest
      - name: List .hunter cache directory
        run: ls -a -l ~/.hunter/_Base/ || true
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.8
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          sudo apt install libusb-1.0-0-dev
          python -m pip install -r docs/requirements_mkdoc.txt
      - name: Configure project
        run: cmake -S . -B build -DDEPTHAI_PYTHON_FORCE_DOCSTRINGS=ON -DDEPTHAI_PYTHON_DOCSTRINGS_OUTPUT="$PWD/docstrings/depthai_python_docstring.hpp"
      - name: Build target 'pybind11_mkdoc'
        run: cmake --build build --target pybind11_mkdoc --parallel 4
      - name: Upload docstring artifacts
        uses: actions/upload-artifact@v3
        with:
          name: docstrings
          path: docstrings/
          retention-days: 1
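  # The 'docstrings' artifact produced above is what the jobs below download and point
  # DEPTHAI_PYTHON_DOCSTRINGS_INPUT at. A local reproduction sketch, assuming the same
  # dependencies (libusb and docs/requirements_mkdoc.txt) are installed:
  #   cmake -S . -B build -DDEPTHAI_PYTHON_FORCE_DOCSTRINGS=ON \
  #         -DDEPTHAI_PYTHON_DOCSTRINGS_OUTPUT="$PWD/docstrings/depthai_python_docstring.hpp"
  #   cmake --build build --target pybind11_mkdoc --parallel 4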
  # Build and test bindings
  pytest:
    needs: build-docstrings
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Print home directory
        run: echo Home directory inside container $HOME
      - name: Setup cmake
        if: matrix.os == 'macos-latest'
        uses: jwlawson/actions-setup-cmake@v1.13
      - name: Cache .hunter folder
        if: matrix.os != 'windows-latest'
        uses: actions/cache@v3
        with:
          path: ~/.hunter/
          key: hunter-pytest-${{ matrix.os }}
      - name: Cache .hunter folder
        if: matrix.os == 'windows-latest'
        uses: actions/cache@v3
        with:
          path: C:/.hunter/
          key: hunter-pytest-${{ matrix.os }}
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
      - name: Set up Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install dependencies (Ubuntu)
        if: matrix.os == 'ubuntu-latest'
        run: |
          python -m pip install --upgrade pip
          sudo apt install libusb-1.0-0-dev
      - name: Install dependencies (macOS)
        if: matrix.os == 'macos-latest'
        run: |
          python -m pip install --upgrade pip
          brew install libusb
      - name: Install pytest
        run: |
          python -m pip install pytest numpy
      - name: Compile
        run: |
          cmake -S . -B build -D CMAKE_BUILD_TYPE=Release -D DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/docstrings/depthai_python_docstring.hpp -D DEPTHAI_PYTHON_ENABLE_TESTS=ON
          cmake --build build --parallel 4
      - name: Test
        run: |
          cmake --build build --target pytest --config Release
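  # Note: the pytest job above exercises a single interpreter (3.10). A hedged sketch of how a
  # Python axis could be added to its matrix, purely for illustration and not part of the current
  # workflow:
  #   strategy:
  #     matrix:
  #       os: [ubuntu-latest, windows-latest, macos-latest]
  #       python-version: ['3.8', '3.10', '3.12']
  #   ...
  #       - uses: actions/setup-python@v4
  #         with:
  #           python-version: ${{ matrix.python-version }}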
  # This job builds wheels for armhf arch (RPi)
  build-linux-armhf:
    needs: build-docstrings
    strategy:
      matrix:
        rpi-os: [rpi-buster, rpi-bullseye, rpi-bookworm]
    runs-on: ${{ matrix.rpi-os }}
    steps:
      - name: Print home directory
        run: echo Home directory inside container $HOME
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
      - name: Append build hash if not a tagged commit
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
      - name: Building wheel
        run: CMAKE_ARGS='-DDEPTHAI_ENABLE_CURL=OFF' python3 -m pip wheel . -w ./wheelhouse/ --verbose
      - name: Auditing wheels and adding armv6l tag (Running on RPi, binaries compiled as armv6l)
        run: |
          # python3 -m pip install -U wheel auditwheel # Called once when setting up the runner
          for whl in wheelhouse/*.whl; do auditwheel repair "$whl" --plat linux_armv7l -w wheelhouse/preaudited/; done
          for whl in wheelhouse/preaudited/*.whl; do python3 -m wheel tags --platform-tag +linux_armv6l "$whl"; done
          mkdir -p wheelhouse/audited/
          for whl in wheelhouse/preaudited/*linux_armv6l*.whl; do cp "$whl" wheelhouse/audited/$(basename $whl); done
      - name: Archive wheel artifacts
        uses: actions/upload-artifact@v3
        with:
          name: audited-wheels
          path: wheelhouse/audited/
      - name: Deploy wheels to artifactory (if not a release)
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: bash ./ci/upload-artifactory.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
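  # Why the extra tag in the audit step above: auditwheel on the Pi emits linux_armv7l wheels,
  # but as the step name notes the binaries are compiled as armv6l, so `wheel tags
  # --platform-tag +linux_armv6l` adds that platform tag as well, and only the retagged wheels
  # are copied into wheelhouse/audited/ for upload.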
  # This job builds wheels for Windows x86_64 arch
  build-windows-x86_64:
    needs: build-docstrings
    runs-on: windows-latest
    strategy:
      matrix:
        python-version: [3.7, 3.8, 3.9, '3.10', '3.11', '3.12']
        python-architecture: [x64, x86]
      fail-fast: false
    steps:
      - name: Cache .hunter folder
        uses: actions/cache@v3
        with:
          path: C:/.hunter
          key: hunter-msvc
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/docstrings/depthai_python_docstring.hpp" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
      - name: Select Windows SDK
        run: echo "CMAKE_ARGS=-DCMAKE_SYSTEM_VERSION=${{ env.CMAKE_WINDOWS_SDK_VERSION }}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
      - name: Install dependencies
        run: choco install strawberryperl
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          architecture: ${{ matrix.python-architecture }}
      - name: Append build hash if not a tagged commit
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
      - name: Building wheels
        run: python -m pip wheel . -w ./wheelhouse/audited/ --verbose
      - name: Archive wheel artifacts
        uses: actions/upload-artifact@v3
        with:
          name: audited-wheels
          path: wheelhouse/audited/
      - name: Deploy wheels to artifactory (if not a release)
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: bash ./ci/upload-artifactory.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
  # This job builds wheels for macOS (x86_64 on macos-13, arm64 on macos-14)
  build-macos:
    needs: build-docstrings
    strategy:
      matrix:
        python-version: [3.8, 3.9, '3.10', '3.11', '3.12']
        os: [macos-13, macos-14] # macos-13 is x64, macos-14 is arm64
    runs-on: ${{ matrix.os }}
    steps:
      - name: Cache .hunter folder
        uses: actions/cache@v3
        with:
          path: ~/.hunter
          key: hunter-macos-latest
      - name: List .hunter cache directory
        run: |
          ls -a -l ~/.hunter/_Base/ || true
          echo "PATH=$PATH"
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Append build hash if not a tagged commit
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          brew install libusb
          python -m pip install delocate
      - name: Set macOS deployment target
        run: echo "MACOSX_DEPLOYMENT_TARGET=10.9" >> $GITHUB_ENV
      - name: Building wheels
        run: python -m pip wheel . -w ./wheelhouse/ --verbose
      - name: Auditing wheels
        run: ci/repair-whl-macos.sh `pwd`/wheelhouse/* `pwd`/wheelhouse/audited
      - name: Archive wheel artifacts
        uses: actions/upload-artifact@v3
        with:
          name: audited-wheels
          path: wheelhouse/audited/
      - name: Deploy wheels to artifactory (if not a release)
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: bash ./ci/upload-artifactory.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
  # This job builds wheels for x86_64 arch
  build-linux-x86_64:
    needs: build-docstrings
    runs-on: ubuntu-latest
    container:
      image: quay.io/pypa/manylinux2014_x86_64:2024-01-08-eb135ed
      env:
        PLAT: manylinux2014_x86_64
    steps:
      - name: Cache .hunter folder
        uses: actions/cache@v3
        with:
          path: ~/.hunter
          key: hunter-x86_64
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Installing libusb1-devel dependency
        run: yum install -y --disableplugin=fastestmirror libusb1-devel perl-core
      - name: Installing cmake dependency
        run: |
          /opt/python/cp38-cp38/bin/python3.8 -m pip install cmake
          ln -s /opt/python/cp38-cp38/bin/cmake /bin/
      - name: Create folder structure
        run: mkdir -p wheelhouse/audited/
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
      - name: Build and install depthai-core
        run: |
          cmake -S depthai-core/ -B build_core -D CMAKE_BUILD_TYPE=Release -D CMAKE_TOOLCHAIN_FILE=$PWD/cmake/toolchain/pic.cmake
          cmake --build build_core --target install --parallel 4
          echo "DEPTHAI_INSTALLATION_DIR=$PWD/build_core/install/" >> $GITHUB_ENV
      - name: Append build hash if not a tagged commit
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
      - name: Building source distribution
        run: |
          /opt/python/cp38-cp38/bin/python3.8 setup.py sdist --formats=gztar
          mv dist/* wheelhouse/audited/
      - name: Build wheels
        run: for PYBIN in /opt/python/cp3{7..12}*/bin; do "${PYBIN}/pip" wheel . -w ./wheelhouse/ --verbose; done
      - name: Audit wheels
        run: for whl in wheelhouse/*.whl; do auditwheel repair "$whl" --plat $PLAT -w wheelhouse/audited/; done
      - name: Archive wheel artifacts
        uses: actions/upload-artifact@v3
        with:
          name: audited-wheels
          path: wheelhouse/audited/
      - name: Deploy wheels to artifactory (if not a release)
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: bash ./ci/upload-artifactory.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
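  # A rough local approximation of the manylinux build above, assuming Docker is available and the
  # repository (with submodules checked out) is the current directory; the mount path, interpreter
  # choice and omission of the docstring input are simplifications for illustration:
  #   docker run --rm -v "$PWD":/io -w /io quay.io/pypa/manylinux2014_x86_64:2024-01-08-eb135ed \
  #     bash -c 'yum install -y libusb1-devel perl-core && \
  #              /opt/python/cp310-cp310/bin/pip install cmake && ln -s /opt/python/cp310-cp310/bin/cmake /bin/ && \
  #              /opt/python/cp310-cp310/bin/pip wheel . -w wheelhouse/ --verbose && \
  #              auditwheel repair wheelhouse/*.whl --plat manylinux2014_x86_64 -w wheelhouse/audited/'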
  # This job builds wheels for ARM64 arch
  build-linux-arm64:
    needs: build-docstrings
    runs-on: [self-hosted, linux, ARM64]
    container:
      image: quay.io/pypa/manylinux2014_aarch64:2024-01-08-eb135ed
      env:
        PLAT: manylinux2014_aarch64
      # Mount the runner's local Hunter cache directory instead of transferring it to GitHub and back
      volumes:
        - /.hunter:/github/home/.hunter
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - name: Installing libusb1-devel dependency
        run: yum install -y --disableplugin=fastestmirror libusb1-devel perl-core
      - name: Installing cmake dependency
        run: |
          /opt/python/cp38-cp38/bin/python3.8 -m pip install cmake
          ln -s /opt/python/cp38-cp38/bin/cmake /bin/
      - name: Create folder structure
        run: mkdir -p wheelhouse/audited/
      - uses: actions/download-artifact@v3
        with:
          name: 'docstrings'
          path: docstrings
      - name: Specify docstring to use while building the wheel
        run: echo "DEPTHAI_PYTHON_DOCSTRINGS_INPUT=$PWD/docstrings/depthai_python_docstring.hpp" >> $GITHUB_ENV
      - name: Build and install depthai-core
        run: |
          cmake -S depthai-core/ -B build_core -D CMAKE_BUILD_TYPE=Release -D CMAKE_TOOLCHAIN_FILE=$PWD/cmake/toolchain/pic.cmake
          cmake --build build_core --target install --parallel 4
          echo "DEPTHAI_INSTALLATION_DIR=$PWD/build_core/install/" >> $GITHUB_ENV
      - name: Append build hash if not a tagged commit
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
      - name: Building wheels
        run: for PYBIN in /opt/python/cp3{7..12}*/bin; do "${PYBIN}/pip" wheel . -w ./wheelhouse/ --verbose; done
      - name: Auditing wheels
        run: for whl in wheelhouse/*.whl; do auditwheel repair "$whl" --plat $PLAT -w wheelhouse/audited/; done
      - name: Archive wheel artifacts
        uses: actions/upload-artifact@v3
        with:
          name: audited-wheels
          path: wheelhouse/audited/
      - name: Deploy wheels to artifactory (if not a release)
        if: startsWith(github.ref, 'refs/tags/v') != true
        run: bash ./ci/upload-artifactory.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
  release:
    if: startsWith(github.ref, 'refs/tags/v')
    needs: [pytest, build-linux-armhf, build-windows-x86_64, build-macos, build-linux-x86_64, build-linux-arm64]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - uses: actions/setup-python@v4
        with:
          python-version: '3.8'
      - name: Check if version matches
        run: python3.8 -c 'import find_version as v; exit(0) if "${{ github.ref_name }}" == f"v{v.get_package_version()}" else exit(1)'
      # Create GitHub release
      - uses: actions/create-release@master
        id: createRelease
        name: Create ${{ github.ref_name }} depthai-python release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          release_name: Release ${{ github.ref }}
          body: |
            ## Features
            ## Bug fixes
            ## Misc
          draft: true
  # Deploy to PyPI and Artifactory. Runs only when a commit is tagged
  deploy:
    if: startsWith(github.ref, 'refs/tags/v')
    needs: [release]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: 'recursive'
      - uses: actions/download-artifact@v3
        with:
          name: audited-wheels
          path: wheelhouse/audited/
      - name: List files
        run: ls -lah
      - name: Run deploy to PyPI
        run: bash ./ci/upload-pypi.sh
        env:
          PYPI_SERVER: ${{ secrets.PYPI_SERVER }}
          PYPI_USER: ${{ secrets.PYPI_USER }}
          PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
      - name: Run deploy to Artifactory
        run: bash ./ci/upload-artifactory-release.sh
        env:
          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
          ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
          ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
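  # Once the deploy job above finishes for a tagged release, the wheels are installable the usual
  # way, e.g. `python3 -m pip install depthai` (assuming the package is published under that name
  # on PyPI); untagged builds uploaded to Artifactory are fetched via the index configured in the
  # ci/upload-artifactory*.sh scripts.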
  # notify_hil_workflow_linux_x86_64:
  #   needs: [build-linux-x86_64]
  #   runs-on: ubuntu-latest
  #   steps:
  #     - name: Repository Dispatch
  #       uses: peter-evans/repository-dispatch@v2
  #       with:
  #         token: ${{ secrets.HIL_CORE_DISPATCH_TOKEN }}
  #         repository: luxonis/depthai-core-hil-tests
  #         event-type: python-hil-event
  #         client-payload: '{"ref": "${{ github.ref }}", "sha": "${{ github.sha }}"}'
  notify_hil_workflow:
    needs: [build-linux-armhf, build-linux-x86_64]
    runs-on: ubuntu-latest
    steps:
      - name: Dispatch an action and get the run ID
        uses: codex-/return-dispatch@v1
        id: return_dispatch
        with:
          token: ${{ secrets.HIL_CORE_DISPATCH_TOKEN }} # Note this is NOT GITHUB_TOKEN but a PAT
          ref: main # or refs/heads/target_branch
          repo: depthai-core-hil-tests
          owner: luxonis
          workflow: regression_test.yml
          workflow_inputs: '{"commit": "${{ github.ref }}", "parent_url": "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"}'
          workflow_timeout_seconds: 300 # previously 120; the action's default is 300
      - name: Post link to dispatched HIL run
        run: echo "https://github.com/luxonis/depthai-core-hil-tests/actions/runs/${{steps.return_dispatch.outputs.run_id}}" >> $GITHUB_STEP_SUMMARY