Skip to content

Commit

Permalink
[Core] Make Ray an optional "extras" requirement
Browse files Browse the repository at this point in the history
Still included in built docker images
  • Loading branch information
njhill authored and joerunde committed May 16, 2024
1 parent 6b06bf0 commit 4836564
Show file tree
Hide file tree
Showing 4 changed files with 18 additions and 8 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ RUN ldconfig /usr/local/cuda-12.4/compat/
# install vllm wheel first, so that torch etc will be installed
RUN --mount=type=bind,from=build,src=/workspace/dist,target=/vllm-workspace/dist \
--mount=type=cache,target=/root/.cache/pip \
pip install dist/*.whl --verbose
pip install "$(echo dist/*.whl)[ray]" --verbose
#################### vLLM installation IMAGE ####################


Expand Down
1 change: 0 additions & 1 deletion requirements-cuda.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
-r requirements-common.txt

# Dependencies for NVIDIA GPUs
ray >= 2.9
nvidia-ml-py # for pynvml package
vllm-nccl-cu12>=2.18,<2.19 # for downloading nccl library
torch == 2.3.0
Expand Down
3 changes: 1 addition & 2 deletions requirements-rocm.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
# Common dependencies
-r requirements-common.txt

# Dependencies for AMD GPUs
ray == 2.9.3
# No specific dependencies currently for AMD GPUs
20 changes: 16 additions & 4 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import subprocess
import sys
from shutil import which
from typing import Dict, List
from typing import Dict, List, Optional

import torch
from packaging.version import Version, parse
Expand Down Expand Up @@ -380,6 +380,20 @@ def _read_requirements(filename: str) -> List[str]:
return requirements


def get_extra_requirements() -> Dict[str, List[str]]:
    """Return the mapping of optional "extras" names to their requirements.

    The "tensorizer" extra is offered on every platform; the "ray" extra is
    only offered on CUDA and ROCm builds (pinned on ROCm), keeping Ray out
    of the default install while still available via ``pip install vllm[ray]``.

    Returns:
        Dict mapping each extra's name to its list of requirement specifiers.
        Never ``None`` — every supported platform yields at least the
        "tensorizer" entry.

    Raises:
        ValueError: if the build platform is not CUDA, ROCm, Neuron or CPU.
    """
    extras: Dict[str, List[str]] = {"tensorizer": ["tensorizer>=2.9.0"]}
    if _is_cuda():
        extras["ray"] = ["ray>=2.9"]
    elif _is_hip():
        # ROCm pins an exact Ray release (known-working combination).
        extras["ray"] = ["ray==2.9.3"]
    elif _is_neuron() or _is_cpu():
        # No platform-specific extras for Neuron / CPU builds.
        pass
    else:
        # Original message omitted CPU even though the branch above
        # accepts it; list every platform this function actually allows.
        raise ValueError(
            "Unsupported platform, please use CUDA, ROCm, Neuron or CPU.")
    return extras


ext_modules = []

if _is_cuda():
Expand Down Expand Up @@ -425,9 +439,7 @@ def _read_requirements(filename: str) -> List[str]:
python_requires=">=3.8",
install_requires=get_requirements(),
ext_modules=ext_modules,
extras_require={
"tensorizer": ["tensorizer>=2.9.0"],
},
extras_require=get_extra_requirements(),
cmdclass={"build_ext": cmake_build_ext} if not _is_neuron() else {},
package_data=package_data,
)

0 comments on commit 4836564

Please sign in to comment.