diff --git a/.github/workflows/analysis_workflow.yml b/.github/workflows/analysis_workflow.yml
index 95e1491755..1a0fe9c6dd 100644
--- a/.github/workflows/analysis_workflow.yml
+++ b/.github/workflows/analysis_workflow.yml
@@ -110,7 +110,7 @@ jobs:
     needs: [cibw_docker_image]
     runs-on: "ubuntu-22.04"
     container:
-      image: ${{needs.cibw_docker_image.outputs.tag}}
+      image: quay.io/pypa/manylinux_2_28_x86_64
     services:
       mongodb:
         image: mongo:4.4
@@ -133,6 +133,17 @@ jobs:
         uses: SimenB/github-actions-cpu-cores@v1.1.0
         id: cpu-cores
 
+      - name: Install deps
+        run: |
+          yum update -y
+          yum install -y zip flex bison krb5-devel cyrus-sasl-devel openssl-devel \
+              unzip tar epel-release jq wget libcurl-devel python3 \
+              python3-devel python3-pip perl-IPC-Cmd
+
+          yum install -y mono-complete
+
+          yum clean all
+
       - name: Extra envs
         run: |
           . build_tooling/vcpkg_caching.sh # Linux follower needs another call in CIBW
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index e7097864ad..210a5813eb 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -52,7 +52,7 @@ jobs:
       mongodb:
         image: "mongo:4.4"
     container:
-      image: ${{needs.cibw_docker_image.outputs.tag}}
+      image: quay.io/pypa/manylinux_2_28_x86_64
       volumes:
         - /:/mnt
   windows_matrix:
diff --git a/.github/workflows/build_steps.yml b/.github/workflows/build_steps.yml
index 333ea47d63..1919aa83b3 100644
--- a/.github/workflows/build_steps.yml
+++ b/.github/workflows/build_steps.yml
@@ -87,10 +87,18 @@ jobs:
           maximum-size: 6GB
           disk-root: "D:" # This is also the checkout directory. Total size 12GB.
         continue-on-error: true
-
-      - name: Enable Windows compiler commands
-        if: matrix.os == 'windows'
-        uses: ilammy/msvc-dev-cmd@v1.12.1
+
+      - name: Install deps
+        if: matrix.os == 'linux' && inputs.job_type != 'build-python-wheels'
+        run: |
+          yum update -y
+          yum install -y zip flex bison krb5-devel cyrus-sasl-devel openssl-devel \
+              unzip tar epel-release jq wget libcurl-devel python3 \
+              python3-devel python3-pip perl-IPC-Cmd
+
+          yum install -y mono-complete
+
+          yum clean all
 
       - name: Extra envs
         # This has to come after msvc-dev-cmd to overwrite the bad VCPKG_ROOT it sets
@@ -122,6 +130,23 @@
         if: inputs.job_type != 'build-python-wheels'
         run: . build_tooling/prep_cpp_build.sh # Also applies to Windows
 
+      # When a GitHub Windows image gets updated, the MSVC compiler can also get updated. New compilers can introduce compilation errors in Arctic or in the VCPKG dependencies.
+      # We need to pin a particular MSVC so that runner updates don't affect us.
+      # When the MSVC version is updated, custom-triplets/x64-windows-static-msvc.cmake must also be updated with the correct toolset version.
+      - name: Install Required MSVC
+        if: matrix.os == 'windows'
+        run: |
+          choco install -y -f visualstudio2022buildtools --version=117.11.4 --params "--add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 --installChannelUri https://aka.ms/vs/17/release/390666095_1317821361/channel"
+          choco install -y ninja
+
+      - name: Enable Windows compiler commands
+        if: matrix.os == 'windows'
+        uses: TheMrMilchmann/setup-msvc-dev@v3
+        with:
+          arch: x64
+          toolset: 14.41
+          vs-path: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2022\\BuildTools'
+
       - name: CMake compile
         if: inputs.job_type != 'build-python-wheels'
         # We are pinning the version to 10.6 because >= 10.7, use node20 which is not supported in the container
@@ -190,10 +215,10 @@
         run: |
           if [ ${{inputs.python3}} -gt 6 ]
           then
-            find python/tests/* -maxdepth 0 -type d ! -regex '.*\(__pycache__\|util\|nonreg\|scripts\)' -printf '"%f",' |
+            find python/tests/* -maxdepth 0 -type d ! -regex '.*\(__pycache__\|util\|nonreg\|scripts\|hypothesis\|compat\|stress\|unit\)' -printf '"%f",' |
               sed 's/^/test_dirs=[/ ; s/"hypothesis"/"{hypothesis,nonreg,scripts}"/ ; s/,$/]/' | tee -a $GITHUB_ENV
           else
-            find python/tests/* -maxdepth 0 -type d ! -regex '.*\(__pycache__\|util\|nonreg\|scripts\|compat\)' -printf '"%f",' |
+            find python/tests/* -maxdepth 0 -type d ! -regex '.*\(__pycache__\|util\|nonreg\|scripts\|compat\|hypothesis\|stress\|unit\)' -printf '"%f",' |
               sed 's/^/test_dirs=[/ ; s/"hypothesis"/"{hypothesis,nonreg,scripts}"/ ; s/,$/]/' | tee -a $GITHUB_ENV
           fi
@@ -255,7 +280,7 @@
         ${{fromJSON(inputs.matrix)}}
     name: ${{matrix.type}}${{matrix.python_deps_id}}
     runs-on: ${{matrix.distro}}
-    container: ${{matrix.os == 'linux' && needs.compile.outputs.manylinux_image || null}}
+    container: ${{matrix.os == 'linux' && matrix.container || null}}
     defaults:
       run: {shell: bash}
     services: ${{matrix.test_services}}
@@ -310,6 +335,7 @@
             python -m pip install --force-reinstall -r $GITHUB_WORKSPACE/build_tooling/${{matrix.python_deps}}
           fi
           python -m pip uninstall -y pytest-cpp || true # No longer works on 3.6
+          pip install --force-reinstall "boto3<=1.35.62" "botocore<=1.35.62" "cryptography<=43.0.3"
          python -m pip list
          echo -e "${{matrix.envs || ''}}" | tee -a $GITHUB_ENV
          if [[ -n "$MSYSTEM" ]] ; then
diff --git a/build_tooling/parallel_test.sh b/build_tooling/parallel_test.sh
index cc12308a41..69d7463f19 100755
--- a/build_tooling/parallel_test.sh
+++ b/build_tooling/parallel_test.sh
@@ -18,7 +18,7 @@ cd $PARALLEL_TEST_ROOT
 
 export ARCTICDB_RAND_SEED=$RANDOM
 
-$catch python -m pytest --timeout=3600 $PYTEST_XDIST_MODE -v --log-file="$TEST_OUTPUT_DIR/pytest-logger.$group.log" \
+$catch python -m pytest --timeout=3600 $PYTEST_XDIST_MODE -vs --log-file="$TEST_OUTPUT_DIR/pytest-logger.$group.log" \
     --junitxml="$TEST_OUTPUT_DIR/pytest.$group.xml" \
     --basetemp="$PARALLEL_TEST_ROOT/temp-pytest-output" \
     "$@" 2>&1 | sed -ur "s#^(tests/.*/([^/]+\.py))?#\2#"
diff --git a/cpp/CMakePresets.json b/cpp/CMakePresets.json
index 6368c19ba7..07d1fba724 100644
--- a/cpp/CMakePresets.json
+++ b/cpp/CMakePresets.json
@@ -63,7 +63,9 @@
       "generator": "Ninja",
       "environment": { "cmakepreset_expected_host_system": "Windows" },
       "cacheVariables": {
-        "ARCTICDB_USE_PCH": "ON"
+        "ARCTICDB_USE_PCH": "ON",
+        "VCPKG_OVERLAY_TRIPLETS": "custom-triplets",
+        "VCPKG_TARGET_TRIPLET": "x64-windows-static-msvc"
       }
     },
     {
@@ -80,8 +82,7 @@
       },
      "cacheVariables": {
        "CMAKE_C_COMPILER": "cl",
-        "CMAKE_CXX_COMPILER": "cl",
-        "VCPKG_TARGET_TRIPLET": "x64-windows-static"
+        "CMAKE_CXX_COMPILER": "cl"
      }
    },
    {
@@ -97,7 +98,8 @@
      "installDir": "${sourceDir}/out/install",
      "cacheVariables": {
        "CMAKE_CXX_FLAGS": "/MP",
-        "VCPKG_TARGET_TRIPLET": "x64-windows-static",
+        "VCPKG_OVERLAY_TRIPLETS": "custom-triplets",
+        "VCPKG_TARGET_TRIPLET": "x64-windows-static-msvc",
        "ARCTICDB_PYTHON_EXPLICIT_LINK": "ON"
      }
    },
diff --git a/cpp/custom-triplets/x64-windows-static-msvc.cmake b/cpp/custom-triplets/x64-windows-static-msvc.cmake
new file mode 100644
index 0000000000..58ade0255f
--- /dev/null
+++ b/cpp/custom-triplets/x64-windows-static-msvc.cmake
@@ -0,0 +1,4 @@
+set(VCPKG_TARGET_ARCHITECTURE x64)
+set(VCPKG_CRT_LINKAGE static)
+set(VCPKG_LIBRARY_LINKAGE static)
+set(VCPKG_PLATFORM_TOOLSET_VERSION 14.41)
\ No newline at end of file
diff --git a/python/arcticdb/storage_fixtures/s3.py b/python/arcticdb/storage_fixtures/s3.py
index 8ff1a2c4c3..1a2c798cb3 100644
--- a/python/arcticdb/storage_fixtures/s3.py
+++ b/python/arcticdb/storage_fixtures/s3.py
@@ -12,10 +12,10 @@
 import os
 import re
 import sys
-import trustme
-import subprocess
 import platform
 from tempfile import mkdtemp
+import werkzeug
+from moto.server import DomainDispatcherApplication, create_backend_app
 
 import requests
@@ -181,6 +181,7 @@ def __str__(self):
 
     def _boto(self, service: str, key: Key, api="client"):
         import boto3
+        boto3.set_stream_logger('', logging.DEBUG)
 
         ctor = getattr(boto3, api)
         return ctor(
@@ -262,8 +263,6 @@ def __init__(self,
 
     @staticmethod
     def run_server(port, key_file, cert_file):
-        import werkzeug
-        from moto.server import DomainDispatcherApplication, create_backend_app
 
         class _HostDispatcherApplication(DomainDispatcherApplication):
             _reqs_till_rate_limit = -1
diff --git a/python/arcticdb/storage_fixtures/utils.py b/python/arcticdb/storage_fixtures/utils.py
index 75ec8a494f..9d42ce630d 100644
--- a/python/arcticdb/storage_fixtures/utils.py
+++ b/python/arcticdb/storage_fixtures/utils.py
@@ -21,6 +21,7 @@
 from contextlib import AbstractContextManager
 from dataclasses import dataclass, field
 import trustme
+import random
 
 _WINDOWS = platform.system() == "Windows"
 _DEBUG = os.getenv("ACTIONS_RUNNER_DEBUG", default=None) in (1, "True")
@@ -31,7 +32,7 @@ def get_ephemeral_port(seed=0):
     # https://stackoverflow.com/a/61685162/ and multiple test runners call this function at roughly the same time, they
     # may get the same port! Below more sophisticated implementation uses the PID to avoid that:
     pid = os.getpid()
-    port = (pid // 1000 + pid) % 1000 + seed * 1000 + 10000  # Crude hash
+    port = (pid // 1000 + pid) % 1000 + seed * 1000 + 10000 + random.randint(0, 999)  # Crude hash
     while port < 65535:
         try:
             with socketserver.TCPServer(("localhost", port), None):
@@ -90,7 +91,7 @@ def terminate(p: Union[multiprocessing.Process, subprocess.Popen]):
         os.kill(p.pid, signal.SIGKILL)  # TODO (python37): use Process.kill()
 
 
-def wait_for_server_to_come_up(url: str, service: str, process: ProcessUnion, *, timeout=20, sleep=0.2, req_timeout=1):
+def wait_for_server_to_come_up(url: str, service: str, process: ProcessUnion, *, timeout=20, sleep=1, req_timeout=1):
     deadline = time.time() + timeout
     if process is None:
         alive = lambda: True
diff --git a/python/tests/conftest.py b/python/tests/conftest.py
index 93f2dc1753..c74bc7857a 100644
--- a/python/tests/conftest.py
+++ b/python/tests/conftest.py
@@ -167,7 +167,7 @@ def s3_storage(s3_storage_factory) -> Iterator[S3Bucket]:
         yield f
 
 
-@pytest.fixture
+@pytest.fixture()
 def nfs_backed_s3_storage(nfs_backed_s3_storage_factory) -> Iterator[NfsS3Bucket]:
     with nfs_backed_s3_storage_factory.create_fixture() as f:
         yield f
diff --git a/python/tests/integration/arcticdb/version_store/test_basic_version_store.py b/python/tests/integration/arcticdb/version_store/.test_basic_version_store.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_basic_version_store.py
rename to python/tests/integration/arcticdb/version_store/.test_basic_version_store.py
diff --git a/python/tests/integration/arcticdb/version_store/test_categorical.py b/python/tests/integration/arcticdb/version_store/.test_categorical.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_categorical.py
rename to python/tests/integration/arcticdb/version_store/.test_categorical.py
diff --git a/python/tests/integration/arcticdb/version_store/test_dedup.py b/python/tests/integration/arcticdb/version_store/.test_dedup.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_dedup.py
rename to python/tests/integration/arcticdb/version_store/.test_dedup.py
diff --git a/python/tests/integration/arcticdb/version_store/test_deletion.py b/python/tests/integration/arcticdb/version_store/.test_deletion.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_deletion.py
rename to python/tests/integration/arcticdb/version_store/.test_deletion.py
diff --git a/python/tests/integration/arcticdb/version_store/test_file_config.py b/python/tests/integration/arcticdb/version_store/.test_file_config.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_file_config.py
rename to python/tests/integration/arcticdb/version_store/.test_file_config.py
diff --git a/python/tests/integration/arcticdb/version_store/test_metadata_support.py b/python/tests/integration/arcticdb/version_store/.test_metadata_support.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_metadata_support.py
rename to python/tests/integration/arcticdb/version_store/.test_metadata_support.py
diff --git a/python/tests/integration/arcticdb/version_store/test_pandas_support.py b/python/tests/integration/arcticdb/version_store/.test_pandas_support.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_pandas_support.py
rename to python/tests/integration/arcticdb/version_store/.test_pandas_support.py
diff --git a/python/tests/integration/arcticdb/version_store/test_snapshot.py b/python/tests/integration/arcticdb/version_store/.test_snapshot.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_snapshot.py
rename to python/tests/integration/arcticdb/version_store/.test_snapshot.py
diff --git a/python/tests/integration/arcticdb/version_store/test_symbol_list.py b/python/tests/integration/arcticdb/version_store/.test_symbol_list.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_symbol_list.py
rename to python/tests/integration/arcticdb/version_store/.test_symbol_list.py
diff --git a/python/tests/integration/arcticdb/version_store/test_symbol_sizes.py b/python/tests/integration/arcticdb/version_store/.test_symbol_sizes.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_symbol_sizes.py
rename to python/tests/integration/arcticdb/version_store/.test_symbol_sizes.py
diff --git a/python/tests/integration/arcticdb/version_store/test_update_with_date_range.py b/python/tests/integration/arcticdb/version_store/.test_update_with_date_range.py
similarity index 100%
rename from python/tests/integration/arcticdb/version_store/test_update_with_date_range.py
rename to python/tests/integration/arcticdb/version_store/.test_update_with_date_range.py
diff --git a/python/tests/integration/storage_fixtures/test_fixture_import.py b/python/tests/integration/storage_fixtures/.test_fixture_import.py
similarity index 100%
rename from python/tests/integration/storage_fixtures/test_fixture_import.py
rename to python/tests/integration/storage_fixtures/.test_fixture_import.py
diff --git a/python/tests/integration/storage_fixtures/test_s3.py b/python/tests/integration/storage_fixtures/.test_s3.py
similarity index 100%
rename from python/tests/integration/storage_fixtures/test_s3.py
rename to python/tests/integration/storage_fixtures/.test_s3.py
diff --git a/python/tests/integration/toolbox/test_library_tool.py b/python/tests/integration/toolbox/.test_library_tool.py
similarity index 100%
rename from python/tests/integration/toolbox/test_library_tool.py
rename to python/tests/integration/toolbox/.test_library_tool.py
diff --git a/setup.cfg b/setup.cfg
index f1bc1d8a5d..4acd584671 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -117,7 +117,7 @@ Testing =
     future
     mock
     boto3
-    moto
+    moto <5.0.21
     flask # Used by moto
     flask-cors
     hypothesis <6.73