Skip to content

Commit

Permalink
move to same block
Browse files — browse the repository at this point in the history
  • Loading branch information
nikellepetrillo committed Dec 10, 2024
1 parent c0f5000 commit b3153d0
Showing 1 changed file with 120 additions and 75 deletions.
195 changes: 120 additions & 75 deletions .github/workflows/test_illumina_genotyping_array.yml
Original file line number | Diff line number | Diff line change
Expand Up @@ -134,17 +134,32 @@ jobs:
- name: Update test inputs and Upload to Terra
run: |
UPDATE_TRUTH="${{ github.event.inputs.updateTruth || 'false' }}"
USE_CALL_CACHE="${{ github.event.inputs.useCallCache || 'true' }}"
TRUTH_BRANCH="${{ github.event.inputs.truthBranch || 'master' }}"
CURRENT_TIME=$(date +"%Y-%m-%d-%H-%M-%S")
MAX_RETRIES=2
RETRY_DELAY=300 # 300 seconds = 5 minutes
# Initialize variables to aggregate statuses and outputs
ALL_WORKFLOW_STATUSES="Workflow ID | Status"$'\n'"--- | ---"
ALL_OUTPUTS=""
# Initialize arrays to track submission and workflow statuses
declare -a SUBMISSION_IDS
declare -A WORKFLOW_STATUSES
# Convert UPDATE_TRUTH to a boolean-friendly format ("true" -> true, "false" -> false)
# Convert UPDATE_TRUTH and USE_CALL_CACHE to a boolean-friendly format ("true" -> true, "false" -> false)
if [ "$UPDATE_TRUTH" = "true" ]; then
UPDATE_TRUTH_BOOL=true
else
UPDATE_TRUTH_BOOL=false
fi
if [ "$USE_CALL_CACHE" == "true" ]; then
USE_CALL_CACHE_BOOL=true
else
USE_CALL_CACHE_BOOL=false
fi
PIPELINE_NAME="TestIlluminaGenotypingArray"
PIPELINE_DIR="pipelines/broad/genotyping/illumina"
TEST_TYPE="${{ env.testType }}"
Expand All @@ -156,14 +171,14 @@ jobs:
RESULTS_PATH="gs://broad-gotc-test-storage/IlluminaGenotypingArray/results/$CURRENT_TIME"
for input_file in "$INPUTS_DIR"/*.json; do
echo "Processing input file: $input_file"
test_input_file=$(python3 scripts/firecloud_api/UpdateTestInputs.py --truth_path "$TRUTH_PATH" \
--results_path "$RESULTS_PATH" \
--inputs_json "$input_file" \
--update_truth "$UPDATE_TRUTH_BOOL" \
--commit_hash "$COMMIT_HASH" )
echo "Uploading the test input file: $test_input_file"
echo "Branch name: $branch_name"
echo "Processing input file: $input_file"
test_input_file=$(python3 scripts/firecloud_api/UpdateTestInputs.py --truth_path "$TRUTH_PATH" \
--results_path "$RESULTS_PATH" \
--inputs_json "$input_file" \
--update_truth "$UPDATE_TRUTH_BOOL" \
--commit_hash "$COMMIT_HASH" )
echo "Uploading the test input file: $test_input_file"
echo "Branch name: $branch_name"
python3 scripts/firecloud_api/firecloud_api2.py \
upload_test_inputs \
Expand All @@ -174,27 +189,9 @@ jobs:
--branch_name "$branch_name" \
--sa-json-b64 "$SA_JSON_B64" \
--user "[email protected]"
done
- name: Submit to Terra and Monitor Workflows
run: |
# Set common environment variables
MAX_RETRIES=2
RETRY_DELAY=300 # 300 seconds = 5 minutes
PIPELINE_NAME="TestIlluminaGenotypingArray"
PIPELINE_DIR="pipelines/broad/genotyping/illumina"
TEST_TYPE="${{ env.testType }}"
INPUTS_DIR="$PIPELINE_DIR/test_inputs/$TEST_TYPE"
USE_CALL_CACHE="${{ github.event.inputs.useCallCache || 'true' }}"
if [ "$USE_CALL_CACHE" == "true" ]; then
USE_CALL_CACHE_BOOL=true
else
USE_CALL_CACHE_BOOL=false
fi
# Create the submission_data.json file which will be the same for all inputs
SUBMISSION_DATA_FILE="submission_data.json"
# Use a heredoc to generate the JSON file content dynamically
cat <<EOF > "$SUBMISSION_DATA_FILE"
{
Expand All @@ -210,55 +207,103 @@ jobs:
}
EOF
echo "Created submission data file: $SUBMISSION_DATA_FILE"
cat "$SUBMISSION_DATA_FILE"
# Initialize variables to aggregate statuses and outputs
ALL_WORKFLOW_STATUSES="Workflow ID | Status"$'\n'"--- | ---"
ALL_OUTPUTS=""
# Initialize arrays to track submission and workflow statuses
declare -a SUBMISSION_IDS
declare -A WORKFLOW_STATUSES
# Loop through each file in the appropriate test inputs directory
INPUTS_DIR="$PIPELINE_DIR/test_inputs/$TEST_TYPE"

echo "Running tests with test type: $TEST_TYPE"

attempt=1

for input_file in "$INPUTS_DIR"/*.json; do
while [ $attempt -le $MAX_RETRIES ]; do
echo "Attempt $attempt: Submitting job for input file: $input_file"
SUBMISSION_ID=$(python3 scripts/firecloud_api/firecloud_api2.py submit_job \
--workspace-namespace "warp-pipelines" \
--workspace-name "WARP Tests" \
--sa-json-b64 "$SA_JSON_B64" \
--user "[email protected]" \
--submission_data_file "$SUBMISSION_DATA_FILE")
echo "submission id is $SUBMISSION_ID"

#SUBMISSION_ID=$(firecloud_action submit_job --submission_data_file "$SUBMISSION_DATA_FILE")

if [[ "$SUBMISSION_ID" == *"404"* ]]; then
echo "Error: Dockstore method not found. Retrying in $RETRY_DELAY seconds..."
sleep $RETRY_DELAY
((attempt++))
elif [ -z "$SUBMISSION_ID" ]; then
echo "Submission failed for input file: $input_file. No submission ID received."
break
else
echo "Submission successful. Submission ID: $SUBMISSION_ID"
SUBMISSION_IDS+=("$SUBMISSION_ID")
break
fi

if [ $attempt -gt $MAX_RETRIES ]; then
echo "Max retries reached. Exiting..."
fi
done
while [ $attempt -le $MAX_RETRIES ]; do
echo "Attempt $attempt: Submitting job for input file: $input_file"
SUBMISSION_ID=$(python3 scripts/firecloud_api/firecloud_api2.py submit_job \
--workspace-namespace "warp-pipelines" \
--workspace-name "WARP Tests" \
--sa-json-b64 "$SA_JSON_B64" \
--user "[email protected]" \
--submission_data_file "$SUBMISSION_DATA_FILE")
if [[ "$SUBMISSION_ID" == *"404"* ]]; then
echo "Error: Dockstore method not found. Retrying in $RETRY_DELAY seconds..."
sleep $RETRY_DELAY
((attempt++))
elif [ -z "$SUBMISSION_ID" ]; then
echo "Submission failed for input file: $input_file. No submission ID received."
break
else
echo "Submission successful. Submission ID: $SUBMISSION_ID"
SUBMISSION_IDS+=("$SUBMISSION_ID")
break
fi
if [ $attempt -gt $MAX_RETRIES ]; then
echo "Max retries reached. Exiting..."
fi
done

done
#- name: Submit to Terra and Monitor Workflows
# run: |
#
#
# # Create the submission_data.json file which will be the same for all inputs
# SUBMISSION_DATA_FILE="submission_data.json"
#
# # Use a heredoc to generate the JSON file content dynamically
# cat <<EOF > "$SUBMISSION_DATA_FILE"
# {
# "methodConfigurationNamespace": "warp-pipelines",
# "methodConfigurationName": "$PIPELINE_NAME",
# "useCallCache": $USE_CALL_CACHE_BOOL,
# "deleteIntermediateOutputFiles": false,
# "useReferenceDisks": true,
# "memoryRetryMultiplier": 1.2,
# "workflowFailureMode": "NoNewCalls",
# "userComment": "Automated submission",
# "ignoreEmptyOutputs": false
# }
# EOF
# echo "Created submission data file: $SUBMISSION_DATA_FILE"
#
# # Initialize variables to aggregate statuses and outputs
# ALL_WORKFLOW_STATUSES="Workflow ID | Status"$'\n'"--- | ---"
# ALL_OUTPUTS=""
#
# # Initialize arrays to track submission and workflow statuses
# declare -a SUBMISSION_IDS
# declare -A WORKFLOW_STATUSES

# # Loop through each file in the appropriate test inputs directory
# INPUTS_DIR="$PIPELINE_DIR/test_inputs/$TEST_TYPE"
#
# echo "Running tests with test type: $TEST_TYPE"
#
# attempt=1
#
# for input_file in "$INPUTS_DIR"/*.json; do
# while [ $attempt -le $MAX_RETRIES ]; do
# echo "Attempt $attempt: Submitting job for input file: $input_file"
# SUBMISSION_ID=$(python3 scripts/firecloud_api/firecloud_api2.py submit_job \
# --workspace-namespace "warp-pipelines" \
# --workspace-name "WARP Tests" \
# --sa-json-b64 "$SA_JSON_B64" \
# --user "[email protected]" \
# --submission_data_file "$SUBMISSION_DATA_FILE")
#
# if [[ "$SUBMISSION_ID" == *"404"* ]]; then
# echo "Error: Dockstore method not found. Retrying in $RETRY_DELAY seconds..."
# sleep $RETRY_DELAY
# ((attempt++))
# elif [ -z "$SUBMISSION_ID" ]; then
# echo "Submission failed for input file: $input_file. No submission ID received."
# break
# else
# echo "Submission successful. Submission ID: $SUBMISSION_ID"
# SUBMISSION_IDS+=("$SUBMISSION_ID")
# break
# fi
#
# if [ $attempt -gt $MAX_RETRIES ]; then
# echo "Max retries reached. Exiting..."
# fi
# done
# done
#



Expand Down

0 comments on commit b3153d0

Please sign in to comment.