From 2ce6ed39bd93e5bea90ab0b0b02a8f9f079c7b00 Mon Sep 17 00:00:00 2001 From: Harsha HS Date: Tue, 2 Jul 2024 05:17:02 -0700 Subject: [PATCH 1/2] [ROCm] Add script to run multi gpu tests --- build_tools/rocm/run_xla_multi_gpu.sh | 72 +++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100755 build_tools/rocm/run_xla_multi_gpu.sh diff --git a/build_tools/rocm/run_xla_multi_gpu.sh b/build_tools/rocm/run_xla_multi_gpu.sh new file mode 100755 index 0000000000000..61654167e6112 --- /dev/null +++ b/build_tools/rocm/run_xla_multi_gpu.sh @@ -0,0 +1,72 @@ +#!/usr/bin/env bash +# Copyright 2024 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# ============================================================================== + +set -e +set -x + +N_BUILD_JOBS=$(grep -c ^processor /proc/cpuinfo) +# If rocm-smi exists locally (it should) use it to find +# out how many GPUs we have to test with. +rocm-smi -i +STATUS=$? +if [ $STATUS -ne 0 ]; then TF_GPU_COUNT=4; else + TF_GPU_COUNT=$(rocm-smi -i|grep 'Device ID' |grep 'GPU' |wc -l) +fi +TF_TESTS_PER_GPU=1 +N_TEST_JOBS=$(expr ${TF_GPU_COUNT} \* ${TF_TESTS_PER_GPU}) + +echo "" +echo "Bazel will use ${N_BUILD_JOBS} concurrent build job(s) and ${N_TEST_JOBS} concurrent test job(s)." 
+echo "" + +# First positional argument (if any) specifies the ROCM_INSTALL_DIR +if [[ -n $1 ]]; then + ROCM_INSTALL_DIR=$1 +else + if [[ -z "${ROCM_PATH}" ]]; then + ROCM_INSTALL_DIR=/opt/rocm-6.0.2 + else + ROCM_INSTALL_DIR=$ROCM_PATH + fi +fi + +export PYTHON_BIN_PATH=`which python3` +export TF_NEED_ROCM=1 +export ROCM_PATH=$ROCM_INSTALL_DIR +TAGS_FILTER="gpu,requires-gpu-nvidia,-oss_excluded,-oss_serial" +UNSUPPORTED_GPU_TAGS="$(echo -requires-gpu-sm{60,70,80,86,89,90}{,-only})" +TAGS_FILTER="${TAGS_FILTER},${UNSUPPORTED_GPU_TAGS// /,}" + +bazel \ + test \ + --config=rocm \ + --build_tag_filters=${TAGS_FILTER} \ + --test_tag_filters=${TAGS_FILTER} \ + --test_timeout=920,2400,7200,9600 \ + --test_sharding_strategy=disabled \ + --test_output=errors \ + --flaky_test_attempts=3 \ + --keep_going \ + --local_test_jobs=${N_TEST_JOBS} \ + --test_env=TF_TESTS_PER_GPU=$TF_TESTS_PER_GPU \ + --test_env=TF_GPU_COUNT=$TF_GPU_COUNT \ + --action_env=XLA_FLAGS=--xla_gpu_force_compilation_parallelism=16 \ + --action_env=XLA_FLAGS=--xla_gpu_enable_llvm_module_compilation_parallelism=true \ + -- //xla/tests:collective_ops_test_e2e_gpu \ + //xla/tests:collective_ops_test_gpu \ + //xla/tests:replicated_io_feed_test_gpu \ + //xla/tools/multihost_hlo_runner:functional_hlo_runner_test_gpu From c1abbb88a26c884b16e48516b073601ac9b39937 Mon Sep 17 00:00:00 2001 From: Harsha HS Date: Wed, 3 Jul 2024 04:06:46 -0700 Subject: [PATCH 2/2] Add number of gpus check and distributed pjrt tests --- build_tools/rocm/run_xla_multi_gpu.sh | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/build_tools/rocm/run_xla_multi_gpu.sh b/build_tools/rocm/run_xla_multi_gpu.sh index 61654167e6112..f438dbbf4ad62 100755 --- a/build_tools/rocm/run_xla_multi_gpu.sh +++ b/build_tools/rocm/run_xla_multi_gpu.sh @@ -23,9 +23,14 @@ N_BUILD_JOBS=$(grep -c ^processor /proc/cpuinfo) # out how many GPUs we have to test with. rocm-smi -i STATUS=$? 
-if [ $STATUS -ne 0 ]; then TF_GPU_COUNT=4; else +if [ $STATUS -ne 0 ]; then TF_GPU_COUNT=1; else TF_GPU_COUNT=$(rocm-smi -i|grep 'Device ID' |grep 'GPU' |wc -l) fi +if [[ $TF_GPU_COUNT -lt 4 ]]; then + echo "Found only ${TF_GPU_COUNT} gpus, multi-gpu tests need at least 4 gpus." + exit +fi + TF_TESTS_PER_GPU=1 N_TEST_JOBS=$(expr ${TF_GPU_COUNT} \* ${TF_TESTS_PER_GPU}) @@ -47,7 +52,7 @@ fi export PYTHON_BIN_PATH=`which python3` export TF_NEED_ROCM=1 export ROCM_PATH=$ROCM_INSTALL_DIR -TAGS_FILTER="gpu,requires-gpu-nvidia,-oss_excluded,-oss_serial" +TAGS_FILTER="-oss_excluded,-oss_serial" UNSUPPORTED_GPU_TAGS="$(echo -requires-gpu-sm{60,70,80,86,89,90}{,-only})" TAGS_FILTER="${TAGS_FILTER},${UNSUPPORTED_GPU_TAGS// /,}" @@ -69,4 +74,6 @@ bazel \ -- //xla/tests:collective_ops_test_e2e_gpu \ //xla/tests:collective_ops_test_gpu \ //xla/tests:replicated_io_feed_test_gpu \ - //xla/tools/multihost_hlo_runner:functional_hlo_runner_test_gpu + //xla/tools/multihost_hlo_runner:functional_hlo_runner_test_gpu \ + //xla/pjrt/distributed:topology_util_test \ + //xla/pjrt/distributed:client_server_test