name: Run Tests and Build

on: [push, pull_request]

defaults:
  run:
    shell: bash -l {0}
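    # `bash -l {0}` runs each step's script in a login shell (`-l`), so
    # profile-based environment setup such as conda/micromamba activation is
    # available; `{0}` is GitHub Actions' placeholder for the generated script file.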

concurrency:
  # make sure only one run of this workflow for a given PR or a given branch
  # can happen at one time. previous queued or started runs will be cancelled.
  # github.workflow is the workflow name
  # github.ref is the ref that triggered the workflow run
  # on push, this is refs/heads/<branch name>
  # on pull request, this is refs/pull/<pull request number>/merge
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
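  # For example, a push to main yields the group
  # "Run Tests and Build-refs/heads/main", and any still-running build for
  # that branch is cancelled when a new push arrives.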

env:
  # python-build: provides the PyPA `build` tool, a dependency of `make install`
  # nomkl: make sure numpy is installed without MKL
  # setuptools_scm: needed for versioning to work
  CONDA_DEFAULT_DEPENDENCIES: python-build nomkl setuptools_scm
  LATEST_SUPPORTED_PYTHON_VERSION: "3.12"

jobs:
  check-syntax-errors:
    name: Check for syntax errors
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
      # this is fast enough that it's not worth caching
      - name: Set up environment
        run: pip install flake8
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          # (E9: syntax/IO errors, F63: invalid comparisons, F7: misplaced
          # statements, F82: undefined names)
          python -m flake8 src --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          python -m flake8 src --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

  check-rst-syntax:
    name: Check RST syntax
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        name: Set up python
        with:
          python-version: ${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
      - name: Set up environment
        run: pip install doc8
      - name: Lint with doc8
        run: |
          # Skip line-too-long errors (D001)
          python -m doc8 --ignore D001 HISTORY.rst README_PYTHON.rst

  run-model-tests:
    name: Run model tests
    runs-on: ${{ matrix.os }}
    needs: check-syntax-errors
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
        os: [windows-latest, macos-13]
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch complete history for accurate versioning
      # NOTE: It takes twice as long to save the sample data cache
      # as it does to do a fresh clone (almost 5 minutes vs. 2.5 minutes)
      # Test data is way, way faster by contrast (on the order of a few
      # seconds to archive).
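      # The cache key below hashes the Makefile; assuming the Makefile is what
      # pins the test-data revision, the cache is refreshed whenever that
      # revision changes.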
      - name: Restore git-LFS test data cache
        uses: actions/cache@v3
        with:
          path: data/invest-test-data
          key: git-lfs-testdata-${{ hashFiles('Makefile') }}
      - name: Set up python environment
        uses: ./.github/actions/setup_env
        with:
          python-version: ${{ matrix.python-version }}
          requirements-files: requirements.txt requirements-dev.txt constraints_tests.txt
          requirements: ${{ env.CONDA_DEFAULT_DEPENDENCIES }}
      - name: Download previous conda environment.yml
        continue-on-error: true
        # Using 'dawidd6' since 'download-artifact' GH action doesn't
        # support downloading artifacts from prior workflow runs
        uses: dawidd6/action-download-artifact@v2
        with:
          workflow: build-and-test.yml
          # Get frozen conda env artifact from last successful workflow
          workflow_conclusion: success
          name: Conda Env for ${{ matrix.os }} ${{ matrix.python-version }}
          path: ./conda-env-artifact
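      # The diff below is informational only: it surfaces any dependency
      # changes relative to the last successful run of this workflow.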
      - name: Compare conda environments
        run: |
          micromamba list > conda-env.txt
          # make sure that the exit code is always 0
          # otherwise, an error appears in the action annotations
          diff ./conda-env.txt ./conda-env-artifact/conda-env.txt || true
      - name: Build and install wheel
        run: |
          # uninstall InVEST if it was already in the restored cache
          python -m pip uninstall -y natcap.invest
          python -m build --wheel
          ls -la dist
          pip install $(find dist -name "natcap.invest*.whl")
      - name: Run model tests
        run: make test
      - name: Upload wheel artifact
        uses: actions/upload-artifact@v4
        with:
          name: Wheel for ${{ matrix.os }} ${{ matrix.python-version }}
          path: dist
      - name: Upload conda env artifact
        uses: actions/upload-artifact@v4
        continue-on-error: true
        with:
          name: Conda Env for ${{ matrix.os }} ${{ matrix.python-version }}
          path: conda-env.txt
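      # The GCP authentication and deploy steps are skipped on pull requests
      # because repository secrets are not available in PRs.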
      - name: Authenticate GCP
        if: github.event_name != 'pull_request'
        uses: google-github-actions/auth@v2
        with:
          credentials_json: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
      - name: Set up GCP
        if: github.event_name != 'pull_request'
        uses: google-github-actions/setup-gcloud@v2
      - name: Deploy artifacts to GCS
        if: github.event_name != 'pull_request'
        run: make deploy

  test-source-distribution:
    name: Check sdist
    runs-on: ${{ matrix.os }}
    needs: check-syntax-errors
    strategy:
      matrix:
        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
        os: [windows-latest, macos-13]
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch complete history for accurate versioning
      - uses: ./.github/actions/setup_env
        with:
          python-version: ${{ matrix.python-version }}
          requirements-files: requirements.txt
          requirements: ${{ env.CONDA_DEFAULT_DEPENDENCIES }} twine
      - name: Build source distribution
        run: |
          # Because we're using PEP 518 build requirements, the user's
          # computer is guaranteed to have cython available at build
          # time. Thus, it is no longer necessary to distribute the
          # .cpp files in addition to the .pyx files.
          #
          # Elevating any python warnings to errors to catch build issues ASAP.
          python -W error -m build --sdist
      - name: Install from source distribution
        run: |
          # Install natcap.invest from the sdist in dist/; the [._-] character
          # class matches however the build backend normalizes the project name
          # (natcap.invest, natcap_invest, or natcap-invest).
          pip install $(find dist -name "natcap[._-]invest*")
          # Model tests cover model functionality; here we just want to be sure
          # that natcap.invest builds from source and imports.
          python -c "from natcap.invest import *"
      - name: Check long description with twine
        run: twine check $(find dist -name "natcap[._-]invest*")
      - uses: actions/upload-artifact@v4
        with:
          name: Source distribution ${{ matrix.os }} ${{ matrix.python-version }}
          path: dist
      # Secrets are not available in PRs, so don't use GCP.
      # Only upload the sdist in one of the matrix cases so we don't
      # overwrite artifacts or have duplicates (mac/windows sdists have
      # different extensions).
      - name: Authenticate GCP
        if: github.event_name != 'pull_request' && matrix.os == 'macos-13' && matrix.python-version == env.LATEST_SUPPORTED_PYTHON_VERSION
        uses: google-github-actions/auth@v2
        with:
          credentials_json: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
      - name: Set up GCP
        if: github.event_name != 'pull_request' && matrix.os == 'macos-13' && matrix.python-version == env.LATEST_SUPPORTED_PYTHON_VERSION
        uses: google-github-actions/setup-gcloud@v2
      - name: Deploy artifacts to GCS
        if: github.event_name != 'pull_request' && matrix.os == 'macos-13' && matrix.python-version == env.LATEST_SUPPORTED_PYTHON_VERSION
        run: make deploy

  validate-resources:
    name: Validate Sampledata & User Guide
    runs-on: windows-latest
    needs: check-syntax-errors
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch complete history for accurate versioning
      - uses: ./.github/actions/setup_env
        with:
          python-version: ${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
          requirements-files: requirements.txt
          requirements: ${{ env.CONDA_DEFAULT_DEPENDENCIES }} pytest
      - name: Make install
        run: make install
      - name: Validate sample data
        run: make validate_sampledata
      - name: Validate user guide links
        run: make validate_userguide_filenames

  run-workbench-tests:
    name: Run Workbench Tests
    runs-on: ${{ matrix.os }}
    needs: check-syntax-errors
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        os: [windows-latest, macos-13]
    steps:
      - name: Check out repo
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch complete history for accurate versioning
      - name: Set up python environment
        uses: ./.github/actions/setup_env
        with:
          python-version: ${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
          requirements-files: requirements.txt requirements-dev.txt
          requirements: ${{ env.CONDA_DEFAULT_DEPENDENCIES }}
      - name: Make install
        run: make install
      - name: Set up node
        uses: actions/setup-node@v3
        with:
          node-version: 20
      - name: Restore node_modules cache
        id: nodemodules-cache
        uses: actions/cache@v3
        with:
          path: workbench/node_modules
          key: ${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('workbench/yarn.lock') }}
      - name: Install workbench dependencies
        if: steps.nodemodules-cache.outputs.cache-hit != 'true'
        working-directory: workbench
        run: |
          yarn config set network-timeout 600000 -g
          yarn install
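      # CI=true typically tells JS test runners (e.g. jest) to run once,
      # non-interactively, instead of entering watch mode.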
      - name: Run workbench tests
        working-directory: workbench
        env:
          CI: true
        run: yarn test

  build-binaries:
    name: Build binaries
    needs: check-syntax-errors
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [macos-13, windows-latest]
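        # `include` adds per-OS variables (puppeteer log location, workspace
        # path to tar on failure, and installer extension) referenced by later steps.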
        include:
          - os: macos-13
            puppeteer-log: ~/Library/Logs/invest-workbench/
            workspace-path: InVEST-failed-mac-workspace.tar
            binary-extension: dmg
          - os: windows-latest
            puppeteer-log: ~/AppData/Roaming/invest-workbench/logs/
            workspace-path: ${{ github.workspace }}
            binary-extension: exe
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch complete history for accurate versioning
      - name: Set up conda environment
        uses: ./.github/actions/setup_env
        with:
          python-version: ${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
          requirements-files: |
            requirements.txt
            requirements-dev.txt
            requirements-docs.txt
            constraints_tests.txt
          requirements: ${{ env.CONDA_DEFAULT_DEPENDENCIES }} pandoc
      - name: Make install
        run: make install
      # Not caching chocolatey packages because the cache may not be reliable
      # https://github.com/chocolatey/choco/issues/2134
      # and this step only takes about 30 seconds.
      - name: Install build dependencies (Windows)
        if: matrix.os == 'windows-latest'
        shell: powershell
        run: |
          choco install zip unzip
          $env:PATH += ";C:\ProgramData\chocolatey\bin"
          Import-Module $env:ChocolateyInstall\helpers\chocolateyProfile.psm1
          refreshenv  # Choco-provided command to reload environment variables
      - name: Build userguide
        run: make userguide
      - name: Build binaries
        run: make CONDA="$MAMBA_EXE" binaries
      - name: Install Node.js
        uses: actions/setup-node@v3
        with:
          node-version: 18
      - name: Restore node_modules cache
        id: nodemodules-cache
        uses: actions/cache@v3
        with:
          path: workbench/node_modules
          key: ${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('workbench/yarn.lock') }}
      - name: Install Workbench Dependencies
        if: steps.nodemodules-cache.outputs.cache-hit != 'true'
        working-directory: workbench
        run: |
          yarn config set network-timeout 600000 -g
          yarn install
      - name: Authenticate GCP
        if: github.event_name != 'pull_request'
        uses: google-github-actions/auth@v2
        with:
          credentials_json: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
      - name: Set up GCP
        if: github.event_name != 'pull_request'
        uses: google-github-actions/setup-gcloud@v2
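      # Three variants of the workbench build follow: PR builds are unsigned
      # (secrets are unavailable), macOS push builds fetch the signing
      # certificate from GCS, and Windows code signing is currently disabled
      # (commented out below).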
      - name: Build Workbench (PRs)
        if: github.event_name == 'pull_request'
        working-directory: workbench
        env:
          GH_TOKEN: env.GITHUB_TOKEN
          DEBUG: electron-builder
          CSC_IDENTITY_AUTO_DISCOVERY: false  # disable electron-builder code signing
        run: |
          yarn run build
          yarn run dist
      - name: Build Workbench (macOS)
        if: github.event_name != 'pull_request' && matrix.os == 'macos-13'  # secrets not available in PR
        working-directory: workbench
        env:
          GH_TOKEN: env.GITHUB_TOKEN
          DEBUG: electron-builder
          CSC_LINK: 2025-01-16-Expiry-AppStore-App.p12
          CSC_KEY_PASSWORD: ${{ secrets.MACOS_CODESIGN_CERT_PASS }}
        run: |
          gsutil cp gs://stanford_cert/$CSC_LINK $CSC_LINK
          yarn run build
          yarn run dist
      - name: Build Workbench (Windows)
        if: github.event_name != 'pull_request' && matrix.os == 'windows-latest'  # secrets not available in PR
        working-directory: workbench
        env:
          GH_TOKEN: env.GITHUB_TOKEN
          DEBUG: electron-builder
          # CSC_LINK: Stanford-natcap-code-signing-cert-expires-2024-01-26.p12
          # CSC_KEY_PASSWORD: ${{ secrets.WINDOWS_CODESIGN_CERT_PASS }}
        run: |
          # gsutil cp gs://stanford_cert/$CSC_LINK $CSC_LINK
          yarn run build
          yarn run dist
      - name: Test electron app with puppeteer
        working-directory: workbench
        run: npx cross-env CI=true yarn run test-electron-app
      - name: Sign binaries (macOS)
        if: github.event_name != 'pull_request' && matrix.os == 'macos-13'  # secrets not available in PR
        env:
          CERT_FILE: 2025-01-16-Expiry-AppStore-App.p12
          CERT_PASS: ${{ secrets.MACOS_CODESIGN_CERT_PASS }}
        run: |
          WORKBENCH_BINARY=$(find "$(pwd)/workbench/dist" -type f -name 'invest_*.dmg' | head -n 1)
          make WORKBENCH_BIN_TO_SIGN="$WORKBENCH_BINARY" codesign_mac
      # - name: Sign binaries (Windows)
      #   if: github.event_name != 'pull_request' && matrix.os == 'windows-latest'  # secrets not available in PR
      #   env:
      #     CERT_FILE: Stanford-natcap-code-signing-cert-expires-2024-01-26.p12
      #     CERT_PASS: ${{ secrets.WINDOWS_CODESIGN_CERT_PASS }}
      #   run: |
      #     # figure out the path to signtool.exe (it keeps changing with SDK updates)
      #     SIGNTOOL_PATH=$(find 'C:\\Program Files (x86)\\Windows Kits\\10' -type f -name 'signtool.exe*' | head -n 1)
      #     WORKBENCH_BINARY=$(find "$(pwd)/workbench/dist" -type f -name 'invest_*.exe' | head -n 1)
      #     make WORKBENCH_BIN_TO_SIGN="$WORKBENCH_BINARY" SIGNTOOL="$SIGNTOOL_PATH" codesign_windows
      - name: Deploy artifacts to GCS
        if: github.event_name != 'pull_request'
        run: make deploy
      - name: Upload workbench binary artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: Workbench-${{ runner.os }}-binary
          path: workbench/dist/*.${{ matrix.binary-extension }}
      - name: Upload user's guide artifact (Windows)
        if: matrix.os == 'windows-latest'
        uses: actions/upload-artifact@v4
        with:
          name: InVEST-user-guide
          path: dist/InVEST_*_userguide.zip
      - name: Upload workbench logging from puppeteer
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: ${{ runner.os }}_puppeteer_log.zip
          path: ${{ matrix.puppeteer-log }}
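      # The full invest-autotest suite runs only on pushes to main or release
      # branches, and on autorelease pull requests.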
      - name: Run invest-autotest with binaries
        if: |
          (github.event_name == 'push' &&
          (startsWith(github.ref, 'refs/heads/release') || github.ref == 'refs/heads/main')) ||
          (github.event_name == 'pull_request' && startsWith(github.head_ref, 'autorelease'))
        run: make invest_autotest
      - name: Tar the workspace to preserve permissions (macOS)
        if: failure() && matrix.os == 'macos-13'
        run: tar -cvf ${{ matrix.workspace-path }} ${{ github.workspace }}
      - name: Upload workspace on failure
        if: ${{ failure() }}
        uses: actions/upload-artifact@v4
        with:
          name: InVEST-failed-${{ runner.os }}-workspace
          path: ${{ matrix.workspace-path }}

  build-sampledata:
    name: Build sampledata archives
    needs: check-syntax-errors
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # Fetch complete history for accurate versioning
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
      - name: Install dependencies
        # dependencies of setup.py, needed to get the version string
        run: pip install babel cython numpy setuptools setuptools_scm wheel
      - run: make sampledata sampledata_single
      - name: Upload sample data artifact
        uses: actions/upload-artifact@v4
        with:
          name: InVEST-sample-data
          path: dist/*.zip
      - name: Authenticate GCP
        if: github.event_name != 'pull_request'
        uses: google-github-actions/auth@v2
        with:
          credentials_json: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
      - name: Set up GCP
        if: github.event_name != 'pull_request'
        uses: google-github-actions/setup-gcloud@v2
      - name: Deploy artifacts to GCS
        if: github.event_name != 'pull_request'
        run: make deploy