UAT C1264340845-EEDTEST (MOP02J) #44184

Workflow file for this run

# This workflow verifies a new collection association: when an association pull request is
# synchronized, it parses the environment and collection concept id from the head branch,
# runs the verification test suite against that collection, and labels the PR with the result.
name: Verify New Associations
on:
  pull_request:
    types: [ synchronize ]
jobs:
  verify_collection:
    outputs:
      any_test_failed: ${{ steps.test-result.outputs.any_test_failed }}
      any_test_skipped: ${{ steps.test-result.outputs.any_test_skipped }}
      any_test_error: ${{ steps.test-result.outputs.any_test_error }}
      all_tests_passed: ${{ steps.test-result.outputs.all_tests_passed }}
    permissions:
      checks: write
      # only needed unless run with comment_mode: off
      pull-requests: write
    if: |
      startsWith(github.head_ref, 'diff/uat') ||
      startsWith(github.head_ref, 'diff/ops') ||
      contains(github.event.pull_request.labels.*.name, 'autotest')
    name: Verify ${{ github.event.pull_request.title }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
      - name: Install Poetry
        uses: abatilo/actions-poetry@v2
        with:
          poetry-version: 1.5.1
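      # The head branch is expected to look like diff/<env>/<collection-concept-id>;
      # the next step pulls the environment and concept id out of that name.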
      - name: Get env and concept id
        id: get-env-ccid
        run: |
          echo "test_env=$(python -c "print('${{ github.head_ref }}'.split('/')[1])")" >> $GITHUB_OUTPUT
          echo "concept_id=$(python -c "print('${{ github.head_ref }}'.split('/')[2])")" >> $GITHUB_OUTPUT
      - name: Install
        id: install
        run: |
          poetry install
      - name: Execute tests
        id: run-tests
        working-directory: tests
        env:
          TEST_ENV: ${{ steps.get-env-ccid.outputs.test_env }}
          CONCEPT_ID: ${{ steps.get-env-ccid.outputs.concept_id }}
          CMR_USER: ${{ secrets.CMR_USER }}
          CMR_PASS: ${{ secrets.CMR_PASS }}
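        # The trailing '|| true' keeps this step from failing the job when pytest reports
        # failures; the outcome is decided later from the published test results.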
        run: |
          poetry run pytest verify_collection.py --concept_id $CONCEPT_ID --env $TEST_ENV --junitxml=$GITHUB_WORKSPACE/test-results/test_report.xml --html=$GITHUB_WORKSPACE/test-results/test_report.html || true
      - name: Publish Test Results
        id: publish-test
        uses: EnricoMi/publish-unit-test-result-action@v2
        if: always()
        with:
          report_suite_logs: info
          check_name: Tested with Harmony
          json_file: test-results/test_results.json
          comment_title: Test Results for ${{ steps.get-env-ccid.outputs.concept_id }}
          files: test-results/test_report.xml
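      # Turn the publisher's JSON stats into step outputs (and one env var) that the
      # downstream jobs and the final check use to decide what to do with the PR.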
      - name: Save Test Outputs
        id: test-result
        if: always()
        run: |
          echo "any_test_failed=${{ fromJSON( steps.publish-test.outputs.json ).stats.tests_fail > 0 }}" >> $GITHUB_OUTPUT
          echo "any_test_skipped=${{ fromJSON( steps.publish-test.outputs.json ).stats.tests_skip > 0 }}" >> $GITHUB_OUTPUT
          echo "any_test_error=${{ fromJSON( steps.publish-test.outputs.json ).stats.tests_error > 0 }}" >> $GITHUB_OUTPUT
          echo "any_test_passed=${{ fromJSON( steps.publish-test.outputs.json ).stats.tests_succ > 0 }}" >> $GITHUB_OUTPUT
          # all_tests_passed feeds the tests_pass job's if-condition; here it is taken to mean
          # at least one success and no failures, errors, or skips (an inferred definition).
          echo "all_tests_passed=${{ fromJSON( steps.publish-test.outputs.json ).stats.tests_succ > 0 && fromJSON( steps.publish-test.outputs.json ).stats.tests_fail == 0 && fromJSON( steps.publish-test.outputs.json ).stats.tests_error == 0 && fromJSON( steps.publish-test.outputs.json ).stats.tests_skip == 0 }}" >> $GITHUB_OUTPUT
          echo "all_tests_succeeded_or_skipped=${{ fromJSON( steps.publish-test.outputs.json ).stats.tests_succ > 0 && fromJSON( steps.publish-test.outputs.json ).stats.tests_fail == 0 && fromJSON( steps.publish-test.outputs.json ).stats.tests_error == 0 }}" >> $GITHUB_ENV
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: test-results
          path: test-results/**
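      # Record an explicit success check when every test either passed or was skipped,
      # i.e. nothing failed or errored.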
      - name: Succeeded in Completing All Tests
        uses: LouisBrunner/checks-action@v1.6.1
        if: env.all_tests_succeeded_or_skipped == 'true'
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          name: All Tests Run and Completed (Passed and/or Skipped)
          conclusion: success
          output: |
            {"summary":"No failed or errored tests; at least one test passed (any remaining tests were skipped)."}
  tests_pass:
    needs: verify_collection
    name: Merge Pull Request
    runs-on: ubuntu-latest
    if: fromJSON( needs.verify_collection.outputs.all_tests_passed )
    steps:
      - uses: actions-ecosystem/action-remove-labels@v1
        with:
          labels: |
            unverified
      - name: Add labels
        uses: actions-ecosystem/action-add-labels@v1
        with:
          labels: |
            verified
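  # One or more tests failed or errored: relabel the PR and ask for review.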
  tests_fail:
    needs: verify_collection
    name: Fail Pull Request
    runs-on: ubuntu-latest
    if: |
      fromJSON( needs.verify_collection.outputs.any_test_failed ) ||
      fromJSON( needs.verify_collection.outputs.any_test_error )
    steps:
      - uses: actions-ecosystem/action-remove-labels@v1
        with:
          labels: |
            unverified
      - name: Add labels
        uses: actions-ecosystem/action-add-labels@v1
        with:
          labels: |
            failed verification
            bug
      - uses: mshick/add-pr-comment@v2
        with:
          message: |
            **Action Needed**
            Tests have failed or encountered an error. Open the status checks to view the logs and review the failure.
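  # One or more tests were skipped: relabel the PR and explain how to proceed.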
  tests_skipped:
    needs: verify_collection
    name: Update Pull Request
    runs-on: ubuntu-latest
    if: fromJSON( needs.verify_collection.outputs.any_test_skipped )
    steps:
      - uses: actions-ecosystem/action-remove-labels@v1
        with:
          labels: |
            unverified
      - name: Add labels
        uses: actions-ecosystem/action-add-labels@v1
        with:
          labels: |
            tests skipped
      - uses: mshick/add-pr-comment@v2
        with:
          message: |
            **Action Needed**
            Unable to verify the collection because one or more tests were skipped.
            Open the status check to view the logs from the test run and determine why the tests were skipped.
            If there is a justifiable reason, add a comment here and manually merge this PR to accept this unverified association.
            Otherwise, fix the problem that caused the tests to be skipped and rerun verification.
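
# Local reproduction sketch, assuming the same tests/ layout and pytest options used above
# (the concept id and environment below come from this run's title and are illustrative):
#   export CMR_USER=... CMR_PASS=...
#   poetry install
#   cd tests
#   poetry run pytest verify_collection.py --concept_id C1264340845-EEDTEST --env uat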