install from source with unpinned torch
baptistecolle committed Aug 8, 2024
1 parent bacfc98 · commit e996b89
Showing 3 changed files with 14 additions and 7 deletions.
.gitignore (2 changes: 2 additions & 0 deletions)

@@ -172,3 +172,5 @@ work-in-progress/
 experiments/
 amdsmi/
 amd-*
+
+external_repos/

Makefile (7 changes: 4 additions & 3 deletions)

@@ -24,7 +24,7 @@ build_cpu_image:
 	docker build --build-arg IMAGE=optimum-benchmark:latest-cpu --build-arg USER_ID=$(USER_ID) --build-arg GROUP_ID=$(GROUP_ID) -t optimum-benchmark:latest-cpu docker/unroot
 
 build_cuda_image:
-	docker build --build-arg TORCH_VERSION=$(TORCH_VERSION) -t optimum-benchmark:latest-cuda docker/cuda
+	docker build -t optimum-benchmark:latest-cuda docker/cuda
 	docker build --build-arg IMAGE=optimum-benchmark:latest-cuda --build-arg USER_ID=$(USER_ID) --build-arg GROUP_ID=$(GROUP_ID) -t optimum-benchmark:latest-cuda docker/unroot
 
 build_cuda_ort_image:

@@ -109,7 +109,8 @@ install_cli_cpu_neural_compressor:
 	pip install -e .[testing,peft,timm,diffusers,neural-compressor]
 
 install_cli_cuda_pytorch:
-	pip install -e .[testing,timm,diffusers,peft,autoawq,auto-gptq,bitsandbytes,deepspeed]
+	python scripts/install_autoawq.py
+	pip install -e .[testing,timm,diffusers,peft,auto-gptq,bitsandbytes,deepspeed]
 
 install_cli_rocm_pytorch:
 	pip install -e .[testing,timm,diffusers,peft,autoawq,auto-gptq,deepspeed]

@@ -159,7 +160,7 @@ test_cli_cuda_pytorch_multi_gpu:
pytest -s -k "cli and cuda and pytorch and (dp or ddp or device_map or deepspeed) and not awq"

test_cli_cuda_pytorch_single_gpu:
pytest -s -k "cli and cuda and pytorch and not (dp or ddp or device_map or deepspeed) and not awq"
pytest -s -k "cli and cuda and pytorch and not (dp or ddp or device_map or deepspeed) and not awq" --ignore=external_repos

test_cli_cuda_torch_ort_multi_gpu:
pytest -s -k "cli and cuda and torch-ort and (dp or ddp or device_map or deepspeed) and not peft"
Expand Down
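
The install_cli_cuda_pytorch target now runs scripts/install_autoawq.py before the editable install and drops the autoawq extra from that command. The script itself is not among the three files changed in this commit, so the following is only a rough sketch of what such a source-install helper could look like; the upstream URL, the external_repos/ checkout location (consistent with the new .gitignore entry and the --ignore=external_repos pytest flag), and the handling of the torch pin are all assumptions, not taken from this commit.

# Hypothetical sketch only; the real scripts/install_autoawq.py is not shown in this diff.
# Assumed behaviour: clone AutoAWQ into external_repos/ and install it from source,
# leaving the already-installed (unpinned) torch alone.
import subprocess
import sys
from pathlib import Path

AUTOAWQ_REPO = "https://github.com/casper-hansen/AutoAWQ.git"  # assumed upstream repository
CLONE_DIR = Path("external_repos") / "AutoAWQ"


def run(cmd: list[str]) -> None:
    # Echo and execute a command, failing loudly on a non-zero exit code.
    print("+", " ".join(cmd))
    subprocess.run(cmd, check=True)


def main() -> None:
    CLONE_DIR.parent.mkdir(parents=True, exist_ok=True)
    if not CLONE_DIR.exists():
        run(["git", "clone", AUTOAWQ_REPO, str(CLONE_DIR)])
    # Editable install of the checkout into the current environment. How the actual
    # helper avoids re-pinning torch through AutoAWQ's own requirements is not visible
    # in this diff, so that detail is deliberately left out of the sketch.
    run([sys.executable, "-m", "pip", "install", "-e", str(CLONE_DIR)])


if __name__ == "__main__":
    main()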

setup.py (12 changes: 8 additions & 4 deletions)

@@ -58,13 +58,14 @@
"Please install amdsmi from https://github.com/ROCm/amdsmi to enable this feature."
)



if USE_ROCM:
AUTOAWQ = "autoawq@https://github.com/casper-hansen/AutoAWQ/releases/download/v0.2.1/autoawq-0.2.1+rocm571-cp310-cp310-linux_x86_64.whl"
AUTOGPTQ = "auto-gptq@https://huggingface.github.io/autogptq-index/whl/rocm573/auto-gptq/auto_gptq-0.7.1%2Brocm5.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
else:
AUTOAWQ = "autoawq@git+https://github.com/casper-hansen/AutoAWQ.git"
AUTOAWQ_KERNELS = "autoawq-kernels@git+https://github.com/casper-hansen/AutoAWQ_kernels.git"

else:
# AUTOAWQ will be installed from source via scripts/install_autoawq.py script
AUTOGPTQ = "auto-gptq@git+https://github.com/PanQiWei/AutoGPTQ.git"

EXTRAS_REQUIRE = {
@@ -81,7 +82,7 @@
     "py-txi": ["py-txi"],
     "vllm": ["vllm"],
     # optional dependencies
-    "autoawq": [AUTOAWQ_KERNELS, AUTOAWQ],
+    "autoawq": [],
     "auto-gptq": ["optimum", AUTOGPTQ],
     "sentence-transformers": ["sentence-transformers"],
     "bitsandbytes": ["bitsandbytes"],

@@ -93,6 +94,9 @@
"peft": ["peft"],
}

if USE_ROCM:
EXTRAS_REQUIRE["autoawq"] = [AUTOAWQ]


setup(
packages=find_packages(),
Expand Down
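
Net effect of the setup.py change, as a standalone illustration (not part of the diff): the autoawq extra now resolves to the pinned ROCm wheel on ROCm machines and to an empty list everywhere else, where the source install is instead handled by the Makefile step above.

# Illustration mirroring the setup.py logic above; the wheel URL is copied from the diff.
USE_ROCM = False  # e.g. a CUDA-only machine

AUTOAWQ = (
    "autoawq@https://github.com/casper-hansen/AutoAWQ/releases/download/"
    "v0.2.1/autoawq-0.2.1+rocm571-cp310-cp310-linux_x86_64.whl"
)

EXTRAS_REQUIRE = {"autoawq": []}  # empty by default: source install comes from the Makefile
if USE_ROCM:
    EXTRAS_REQUIRE["autoawq"] = [AUTOAWQ]

print(EXTRAS_REQUIRE["autoawq"])  # [] here; the pinned ROCm wheel when USE_ROCM is True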
