Rollback MSVC version #8
name: __build_steps
on:
  workflow_call:
    inputs:
      job_type: {required: true, type: string, description: Selects the steps to enable}
      cmake_preset_type: {required: true, type: string, description: release/debug}
      matrix: {required: true, type: string, description: JSON string to feed into the matrix}
      compile-override: {required: false, type: string, description: Parameter to override the agent used for compiling e.g. compile-on-ec2}
      cibw_image_tag: {required: false, type: string, description: Linux only. As built by the cibw_docker_image.yml workflow}
      cibw_version: {required: false, type: string, description: build-python-wheels only. Must match the cibw_image_tag}
      python_deps_ids: {default: '[""]', type: string, description: build-python-wheels test matrix parameter. JSON string.}
      python3: {default: -1, type: number, description: Python 3 minor version}
      persistent_storage: {default: "false", type: string, description: Specifies whether the Python tests should run against real storage e.g. AWS S3}
      pytest_xdist_mode: {default: "", type: string, description: Additional argument to pass for pytest-xdist}
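# For orientation, a hypothetical caller of this reusable workflow might look like the sketch below
# (the file path and every value are illustrative assumptions, not taken from this repo):
#
#   jobs:
#     build:
#       uses: ./.github/workflows/__build_steps.yml  # assumed file name for this workflow
#       secrets: inherit
#       with:
#         job_type: build-python-wheels
#         cmake_preset_type: release
#         python3: 11
#         matrix: '[{"os": "linux", "distro": "ubuntu-22.04"}]'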
jobs:
  start_ec2_runner:
    if: inputs.compile-override == 'compile-on-ec2'
    uses: ./.github/workflows/ec2_runner_jobs.yml
    secrets: inherit
    with:
      job_type: start
  compile:
    needs: [start_ec2_runner]
    if: |
      always() &&
      !cancelled()
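    # always() stops this job from being skipped when start_ec2_runner is itself skipped (the non-EC2 path);
    # in that case the EC2 label output is empty and runs-on below falls back to matrix.distro.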
    strategy:
      matrix:
        # Declaring the dummy fields here to aid the GitHub Actions linting in VSCode and to provide documentation
        os: [0] # Decouples the steps from any distro version changes
        cmake_preset_prefix: [0]
        cibw_build_suffix: [0]
        envs: [0]
        build_dir: [0] # Must be an absolute path
        vcpkg_installed_dir: [0]
        vcpkg_packages_dir: [0]
        symbols: [0] # Glob for symbol files. Used for inclusion in build-python-wheels builds and exclusion in others.
        do_not_archive: [0]
        test_services: [0] # GitHub service containers to spin up for the pytest run
        container: [0]
        exclude:
          - os: 0
        include:
          - ${{fromJSON(inputs.matrix)[0]}} # The items after index 0 are only used by the test jobs
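    # For illustration only: inputs.matrix is expected to be a JSON array whose first element fills the compile
    # matrix above, e.g. something like
    #   [{"os": "linux", "distro": "ubuntu-22.04", "cmake_preset_prefix": "linux", "cibw_build_suffix": "manylinux_x86_64",
    #     "build_dir": "/tmp/cpp_build", ...}, ...]
    # (keys shown are from the dummy declarations above; the concrete values here are hypothetical).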
    runs-on: ${{ needs.start_ec2_runner.status != 'failure' && needs.start_ec2_runner.outputs.label || matrix.distro}}
    container: ${{ (matrix.os == 'linux' && inputs.job_type != 'build-python-wheels') && matrix.container || null}}
    env:
      SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} # Setting this env var enables the caching
      VCPKG_NUGET_USER: ${{secrets.VCPKG_NUGET_USER || github.repository_owner}}
      VCPKG_NUGET_TOKEN: ${{secrets.VCPKG_NUGET_TOKEN || secrets.GITHUB_TOKEN}}
      VCPKG_MAN_NUGET_USER: ${{secrets.VCPKG_MAN_NUGET_USER}} # For forks to download pre-compiled dependencies from the Man repo
      VCPKG_MAN_NUGET_TOKEN: ${{secrets.VCPKG_MAN_NUGET_TOKEN}}
      CMAKE_C_COMPILER_LAUNCHER: sccache
      CMAKE_CXX_COMPILER_LAUNCHER: sccache
      ARCTIC_CMAKE_PRESET: ${{matrix.cmake_preset_prefix}}-${{inputs.cmake_preset_type}}
      ARCTICDB_BUILD_DIR: ${{matrix.build_dir}}
      ARCTICDB_VCPKG_INSTALLED_DIR: ${{matrix.vcpkg_installed_dir}}
      ARCTICDB_VCPKG_PACKAGES_DIR: ${{matrix.vcpkg_packages_dir}}
      CIBW_ENVIRONMENT_PASS_LINUX: SCCACHE_GHA_VERSION ACTIONS_CACHE_URL ACTIONS_RUNTIME_TOKEN VCPKG_INSTALLATION_ROOT
        VCPKG_BINARY_SOURCES VCPKG_NUGET_USER VCPKG_NUGET_TOKEN VCPKG_MAN_NUGET_USER VCPKG_MAN_NUGET_TOKEN
        CMAKE_C_COMPILER_LAUNCHER CMAKE_CXX_COMPILER_LAUNCHER CMAKE_BUILD_PARALLEL_LEVEL ARCTIC_CMAKE_PRESET
        ARCTICDB_BUILD_DIR TEST_OUTPUT_DIR ARCTICDB_VCPKG_INSTALLED_DIR ARCTICDB_VCPKG_PACKAGES_DIR
      ARCTICDB_DEBUG_FIND_PYTHON: ${{vars.ARCTICDB_DEBUG_FIND_PYTHON}}
      python_impl_name: ${{inputs.python3 > 0 && format('cp3{0}', inputs.python3) || 'default'}}
      CIBW_BUILD: ${{format('cp3{0}-{1}', inputs.python3, matrix.cibw_build_suffix)}}
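      # Illustrative example (actual values come from the caller): with python3=11 and cibw_build_suffix=manylinux_x86_64,
      # CIBW_BUILD resolves to "cp311-manylinux_x86_64", i.e. the single cibuildwheel build identifier to produce.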
      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
    defaults:
      run: {shell: bash}
    steps:
      - name: Checkout
        uses: actions/[email protected]
        with:
          submodules: recursive # Just in case a dep has its own third-party deps
      - name: Configure sccache
        uses: mozilla-actions/[email protected]
        with:
          version: "v0.4.0"
      - name: Windows Pagefile
        if: matrix.os == 'windows'
        uses: al-cheb/[email protected]
        with:
          minimum-size: 2GB
          maximum-size: 6GB
          disk-root: "D:" # This is also the checkout directory. Total size 12GB.
        continue-on-error: true
      - name: Setup VS2022 BuildTools 17.11.x
        if: matrix.os == 'windows'
        run: "$(vswhere.exe -version 17.11 -property installationPath)/VC/Auxiliary/Build/vcvarsall.bat" x64
      - name: Enable Windows compiler commands
        if: matrix.os == 'windows'
        uses: ilammy/[email protected]
      - name: Extra envs
        # This has to come after msvc-dev-cmd to overwrite the bad VCPKG_ROOT it sets
        run: |
          HOME=~ . build_tooling/vcpkg_caching.sh # Linux build-python-wheels needs another call in CIBW
          echo -e "VCPKG_BINARY_SOURCES=$VCPKG_BINARY_SOURCES
          VCPKG_ROOT=$PLATFORM_VCPKG_ROOT
          ${{matrix.envs || ''}}" | tee -a $GITHUB_ENV
          cmake -P cpp/CMake/CpuCount.cmake | sed 's/^-- //' | tee -a $GITHUB_ENV
        env:
          CMAKE_BUILD_PARALLEL_LEVEL: ${{vars.CMAKE_BUILD_PARALLEL_LEVEL}}
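      # What the step above appends to $GITHUB_ENV (values illustrative): VCPKG_BINARY_SOURCES=..., VCPKG_ROOT=...,
      # any matrix-specific KEY=value lines, and - assuming CpuCount.cmake prints something like
      # "-- CMAKE_BUILD_PARALLEL_LEVEL=16" - the sed strips the "-- " prefix so the variable lands in the env too.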
      # ========================= Leader steps =========================
      - name: Remove GitHub default packages (baseimage) # To save space
        if: inputs.job_type == 'build-python-wheels' && matrix.os == 'linux'
        uses: jlumbroso/free-disk-space@main
      - name: Remove GitHub default packages (manylinux) # To save space
        if: inputs.job_type == 'cpp-tests' && matrix.os == 'linux'
        run: |
          du -m /mnt/usr/local/lib/ | sort -n | tail -n 50
          nohup rm -rf /mnt/usr/local/lib/android &
      - name: Find and remove ccache # See PR: #945
        if: matrix.os == 'windows'
        run: rm $(which ccache) || true
      - name: Prepare C++ compilation env
        if: inputs.job_type != 'build-python-wheels'
        run: . build_tooling/prep_cpp_build.sh # Also applies to Windows
      - name: CMake compile
        if: inputs.job_type != 'build-python-wheels'
        # We pin the version to 10.6 because versions >= 10.7 use node20, which is not supported in the container
        uses: lukka/[email protected]
        with:
          cmakeListsTxtPath: ${{github.workspace}}/cpp/CMakeLists.txt
          configurePreset: ${{env.ARCTIC_CMAKE_PRESET}}
          configurePresetAdditionalArgs: "['-DVCPKG_INSTALL_OPTIONS=--clean-after-build']"
          buildPreset: ${{env.ARCTIC_CMAKE_PRESET}}
      - name: Compile C++ tests
        if: inputs.job_type == 'cpp-tests'
        run: cd cpp; cmake --build --preset $ARCTIC_CMAKE_PRESET --target install
      - name: C++ Rapidcheck
        if: inputs.job_type == 'cpp-tests'
        run: cpp/out/install/arcticdb_rapidcheck_tests
      - name: C++ unit tests
        if: inputs.job_type == 'cpp-tests'
        run: |
          cd cpp/out
          install/test_unit_arcticdb --gtest_output=json:test_unit_arcticdb.json \
            --gtest_filter=-TestNfsBackedStorage.*:TestS3Storage.* || true
          [[ $(jq '.tests' test_unit_arcticdb.json) -gt 0 ]]
          [[ $(jq '.failures' test_unit_arcticdb.json) -eq 0 ]]
          [[ $(jq '.errors' test_unit_arcticdb.json) -eq 0 ]]
        env:
          ARCTICDB_memory_loglevel: INFO
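      # The jq checks above read googletest's JSON report, whose top-level object carries overall counters, roughly
      # {"tests": <total>, "failures": <n>, "errors": <n>, ...}; requiring tests > 0 guards against silently running
      # no tests, since the test invocation itself is allowed to fail via the "|| true" above.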
      # We don't do anything with the benchmarks automatically yet, but check that they at least compile and run.
      - name: Compile C++ Benchmarks
        if: inputs.job_type == 'cpp-tests'
        run: cd cpp; cmake --build --preset $ARCTIC_CMAKE_PRESET --target benchmarks -j $CMAKE_BUILD_PARALLEL_LEVEL
      - name: Run C++ Benchmarks
        if: inputs.job_type == 'cpp-tests'
        run: cpp/out/${ARCTIC_CMAKE_PRESET}-build/arcticdb/benchmarks
      # ========================= build-python-wheels (CIBW) steps =========================
      - name: Get CIBuildWheel image & metadata
        if: inputs.job_type == 'build-python-wheels' && matrix.os == 'linux'
        run: |
          docker login ghcr.io -u token -p "${{secrets.GITHUB_TOKEN}}"
          docker pull "${{inputs.cibw_image_tag}}"
          docker inspect --type=image "${{inputs.cibw_image_tag}}" \
            --format='manylinux_image={{index .Config.Labels "io.arcticdb.base"}}' | tee -a $GITHUB_ENV
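      # The docker inspect above writes a line like manylinux_image=<base image> (taken from the io.arcticdb.base
      # label) into $GITHUB_ENV; it is exposed via the job outputs below and reused as the container for python_tests.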
      - name: Build wheel
        if: inputs.job_type == 'build-python-wheels'
        run: pipx run cibuildwheel==${{inputs.cibw_version}}
        env:
          CIBW_MANYLINUX_X86_64_IMAGE: ${{inputs.cibw_image_tag}}
      - name: Store wheel artifact
        if: inputs.job_type == 'build-python-wheels'
        uses: actions/[email protected]
        with:
          name: wheel-${{env.CIBW_BUILD}}
          path: wheelhouse/*.whl
      - name: Discover test directory names
        if: inputs.job_type == 'build-python-wheels'
        # We only run the compat tests for newer Python versions.
        # There are so few nonreg tests that we run them in the hypothesis runner.
        run: |
          if [ ${{inputs.python3}} -gt 6 ]
          then
            find python/tests/* -maxdepth 0 -type d ! -regex '.*\(__pycache__\|util\|nonreg\|scripts\)' -printf '"%f",' |
              sed 's/^/test_dirs=[/ ; s/"hypothesis"/"{hypothesis,nonreg,scripts}"/ ; s/,$/]/' | tee -a $GITHUB_ENV
          else
            find python/tests/* -maxdepth 0 -type d ! -regex '.*\(__pycache__\|util\|nonreg\|scripts\|compat\)' -printf '"%f",' |
              sed 's/^/test_dirs=[/ ; s/"hypothesis"/"{hypothesis,nonreg,scripts}"/ ; s/,$/]/' | tee -a $GITHUB_ENV
          fi
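      # For illustration only: the pipeline above writes a JSON-style list into $GITHUB_ENV, e.g.
      #   test_dirs=["unit","integration","{hypothesis,nonreg,scripts}"]
      # (directory names here are hypothetical); it drives the python_tests matrix below, and the brace group keeps
      # the small hypothesis/nonreg/scripts suites on a single test runner.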
      # ========================= Common =========================
      - name: Disk usage
        if: always()
        run: du -m . ${{matrix.build_dir}} ${{matrix.vcpkg_packages_dir}} | sort -n | tail -n 50 || true; df -h
        continue-on-error: true
      - name: Make build directory readable for archiving
        if: inputs.job_type == 'build-python-wheels' && matrix.os == 'linux' && always()
        run: sudo chown -R $UID ${{matrix.build_dir}}
      - name: Archive build metadata
        uses: actions/[email protected]
        if: always()
        env:
          _exclusion: "\n!${{matrix.build_dir}}/**/"
        with:
          name: build-metadata-${{inputs.job_type}}-${{matrix.os}}-${{env.python_impl_name}}
          retention-days: ${{inputs.job_type == 'cpp-tests' && 7 || 90}}
          # On Windows, exclusions like "!**/*.ext" are prefixed with the drive letter (D:\) of the current working
          # dir before matching. This breaks since we moved the build_dir to C:. Work around by templating exclusions:
          path: ${{matrix.build_dir}}/*-build
            ${{env._exclusion}}${{inputs.job_type == 'build-python-wheels' && 'nofile' || matrix.symbols}}
            ${{env._exclusion}}${{join(matrix.do_not_archive, env._exclusion)}}
    outputs:
      manylinux_image: ${{env.manylinux_image}}
      python_impl_name: ${{env.python_impl_name}}
      test_dirs: ${{env.test_dirs}}
      cibw_build: ${{env.CIBW_BUILD}}
  stop-ec2-runner:
    needs: [start_ec2_runner, compile]
    if: |
      always() &&
      inputs.compile-override == 'compile-on-ec2'
    uses: ./.github/workflows/ec2_runner_jobs.yml
    secrets: inherit
    with:
      job_type: stop
      label: ${{ needs.start_ec2_runner.outputs.label }}
      ec2-instance-id: ${{ needs.start_ec2_runner.outputs.ec2-instance-id }}
  python_tests:
    if: |
      always() &&
      !failure() &&
      !cancelled() &&
      inputs.job_type == 'build-python-wheels'
    needs: [compile]
    strategy:
      fail-fast: false
      matrix:
        type: ${{fromJSON(vars.TEST_DIRS_OVERRIDE || needs.compile.outputs.test_dirs)}}
        python_deps_id: ${{fromJson(inputs.python_deps_ids)}}
        include:
          ${{fromJSON(inputs.matrix)}}
    name: ${{matrix.type}}${{matrix.python_deps_id}}
    runs-on: ${{matrix.distro}}
    container: ${{matrix.os == 'linux' && needs.compile.outputs.manylinux_image || null}}
    defaults:
      run: {shell: bash}
    services: ${{matrix.test_services}}
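    # test_services is expected to be a mapping of GitHub service containers supplied by the caller's matrix
    # (for example a "mongodb" service, which the CI_MONGO_HOST env var below points at); shown here as an assumption.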
    env:
      python_impl_name: ${{needs.compile.outputs.python_impl_name}}
      distinguishing_name: ${{matrix.os}}-${{needs.compile.outputs.python_impl_name}}-${{matrix.type}}${{matrix.python_deps_id}}
      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
    steps:
      - name: Checkout
        uses: actions/[email protected]
      - name: Get wheel artifact
        uses: actions/[email protected]
        with:
          name: wheel-${{needs.compile.outputs.cibw_build}}
          path: ${{runner.temp}}
      - name: Select Python (Linux)
        if: matrix.os == 'linux'
        run: echo /opt/python/${{env.python_impl_name}}*/bin >> $GITHUB_PATH
      - name: Select Python (Windows)
        if: matrix.os == 'windows'
        uses: actions/[email protected]
        with:
          python-version: "3.${{inputs.python3}}"
      - name: Windows Pagefile
        if: matrix.os == 'windows'
        uses: al-cheb/[email protected]
        with:
          minimum-size: 2GB
          maximum-size: 8GB
          disk-root: "D:" # This is also the checkout directory. Total size 12GB.
        continue-on-error: true
      - name: Install npm # The Linux GitHub runner image does not come with npm
        if: matrix.os == 'linux'
        uses: actions/[email protected]
        with:
          node-version: '16'
      - name: Install the wheel and dependencies
        run: |
          npm install -g azurite
          cmake -P cpp/CMake/CpuCount.cmake | sed 's/^-- //' | tee -a $GITHUB_ENV
          python -V
          cd "$RUNNER_TEMP" # Works for Windows-style paths as well
          python -m pip install --force-reinstall $(ls *${{env.python_impl_name}}*.whl)[Testing] pytest-split
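          # The [Testing] extra is assumed to be defined in the package metadata and pulls in the optional test
          # dependencies; the glob selects the single wheel downloaded above for this interpreter, e.g. a file
          # named roughly ArcticDB-*-cp311-*.whl (name illustrative).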
          if [[ -n "${{matrix.python_deps || ''}}" ]] ; then
            echo "Using deps ${{matrix.python_deps}}"
            python -m pip install --force-reinstall -r $GITHUB_WORKSPACE/build_tooling/${{matrix.python_deps}}
          fi
          python -m pip uninstall -y pytest-cpp || true # No longer works on 3.6
          python -m pip list
          echo -e "${{matrix.envs || ''}}" | tee -a $GITHUB_ENV
          if [[ -n "$MSYSTEM" ]] ; then
            echo "LOCALAPPDATA=$LOCALAPPDATA" | tee -a $GITHUB_ENV
          fi
          ${{vars.EXTRA_TEST_PREPARE_CMD || ''}}
        env:
          CMAKE_BUILD_PARALLEL_LEVEL: ${{vars.CMAKE_BUILD_PARALLEL_LEVEL}}
      - name: Set persistent storage variables
        if: inputs.persistent_storage == 'true'
        uses: ./.github/actions/set_persistent_storage_env_vars
        with:
          aws_access_key: "${{ secrets.AWS_S3_ACCESS_KEY }}"
          aws_secret_key: "${{ secrets.AWS_S3_SECRET_KEY }}"
          strategy_branch: "${{ env.distinguishing_name }}"
      - name: Run test
        run: |
          build_tooling/parallel_test.sh tests/${{matrix.type}}
        env:
          TEST_OUTPUT_DIR: ${{runner.temp}}
          # Use the Mongo created in the service container above to test against
          CI_MONGO_HOST: mongodb
          HYPOTHESIS_PROFILE: ci_${{matrix.os}}
          PYTEST_XDIST_MODE: ${{inputs.pytest_xdist_mode}}
      - name: Collect crash dumps (Windows)
        if: matrix.os == 'windows' && failure()
        uses: actions/[email protected]
        with:
          name: crashdump-${{env.distinguishing_name}}
          path: ${{env.LOCALAPPDATA}}/CrashDumps/
      - name: Disk usage
        if: always()
        run: set +e ; du -m . "${PARALLEL_TEST_ROOT:-/tmp/parallel_test}" | sort -n | tail -n 100 || true; df -h
        continue-on-error: true
      - name: Upload the logs
        if: always()
        uses: actions/[email protected]
        with:
          name: pytest-${{env.distinguishing_name}}
          path: |
            ${{runner.temp}}/*test*