diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 1bc2da6..a05427c 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -24,7 +24,7 @@ jobs:
           python-version: "3.12"
           cache: "pip"
       - name: Install dependencies
-        run: pip install .[dev,gpu]
+        run: pip install .[cpu,dev]
      - name: Run test
        run: make test
      - name: Pytest coverage comment
diff --git a/README.md b/README.md
index b302398..543053c 100644
--- a/README.md
+++ b/README.md
@@ -30,12 +30,21 @@ To perform inference using TensorRT, ensure you have TensorRT version 10.5 insta
 
 # Install
 
+Nvidia GPU:
 ```bash
-pip install .
+pip install .[gpu]
+```
+
+Nvidia GPU,TensorRT:
+```bash
+pip install .[gpu,tensorrt]
+```
+
+CPU,COREML:
+```bash
+pip install .[cpu]
 ```
 
-- local inference (CPU, COREML): `pip install .`
-- local inference with GPU (CUDA): `pip install .[gpu]`
 
 ## 🤖 Cloud Inference
 
diff --git a/notebooks/playground.ipynb b/notebooks/playground.ipynb
index e02b43d..5a2a41e 100644
--- a/notebooks/playground.ipynb
+++ b/notebooks/playground.ipynb
@@ -13,7 +13,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "%pip install -e ..[dev,gpu,tensorrt]"
+    "%pip install -e ..[dev,gpu]"
    ]
   },
   {
diff --git a/pyproject.toml b/pyproject.toml
index 389665a..5af5db0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,7 +20,6 @@ dependencies = [
     "tqdm~=4.67.1",
     "numpy~=1.26.4",
     "scipy~=1.14.1",
-    "onnxruntime==1.20.1"
 ]
 
 authors = [{ name = "focoos.ai", email = "info@focoos.ai" }]
@@ -43,4 +42,6 @@ dev = [
     "gradio~=5.3.0",
     "sniffio~=1.2.0",
 ]
+cpu = ["onnxruntime==1.20.1"]
 gpu = ["onnxruntime-gpu==1.20.1","nvidia-cuda-runtime-cu12==12.4.127"]
+tensorrt = ["tensorrt==10.5.0"]