diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index a05427c..f43f687 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -4,7 +4,7 @@ on:
   workflow_dispatch:
 env:
   DOCKER_BUILDKIT: 1
-  AWS_REGION: eu-west-1
+  UV_SYSTEM_PYTHON: 1
 jobs:
   Run-test:
     runs-on: ubuntu-22.04
@@ -13,6 +13,9 @@ jobs:
       contents: read
       issues: write
       pull-requests: write
+    strategy:
+      matrix:
+        python-version: ["3.10", "3.11","3.12"]
     steps:
       - name: Checkout Repository
         uses: actions/checkout@v4
@@ -21,10 +24,13 @@
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.12"
-          cache: "pip"
+          python-version: ${{ matrix.python-version }}
+      - name: Install the latest version of uv and set the python version to 3.12
+        uses: astral-sh/setup-uv@v4
+        with:
+          python-version: ${{ matrix.python-version }}
       - name: Install dependencies
-        run: pip install .[cpu,dev]
+        run: uv pip install .[cpu,dev]
       - name: Run test
         run: make test
       - name: Pytest coverage comment
diff --git a/.gitignore b/.gitignore
index 4e0b893..97d7466 100644
--- a/.gitignore
+++ b/.gitignore
@@ -83,6 +83,7 @@ profile_default/
 ipython_config.py
 
 .env
+.envrc
 .focoos
 .data
 notebooks/.data
diff --git a/README.md b/README.md
index 543053c..c583ed2 100644
--- a/README.md
+++ b/README.md
@@ -14,14 +14,14 @@
 | focoos_isaid_nano | Semantic Segmentation | - | Satellite Imagery, 15 classes |
 | focoos_isaid_medium | Semantic Segmentation | - | Satellite Imagery, 15 classes |
 
-
 # Focoos SDK
 
-
 ## Requirements
+
 For **local inference**, ensure that you have CUDA 12 and cuDNN 9 installed, as they are required for onnxruntime version 1.20.1.
 
 To install cuDNN 9:
+
 ```bash
 apt-get -y install cudnn9-cuda-12
 ```
@@ -31,22 +31,23 @@ To perform inference using TensorRT, ensure you have TensorRT version 10.5 insta
 # Install
 
 Nvidia GPU:
+
 ```bash
-pip install .[gpu]
+pip install '.[gpu]'
 ```
 
 Nvidia GPU,TensorRT:
+
 ```bash
-pip install .[gpu,tensorrt]
+pip install '.[gpu,tensorrt]'
 ```
 
 CPU,COREML:
+
 ```bash
-pip install .[cpu]
+pip install '.[cpu]'
 ```
 
-
-
 ## 🤖 Cloud Inference
 
 ```python
@@ -58,12 +59,13 @@ model = focoos.get_remote_model("focoos_object365")
 model.deploy()
 detections = model.infer("./image.jpg", threshold=0.4)
 ```
+
 ## 🤖 Cloud Inference with Gradio
 
 setup FOCOOS_API_KEY_GRADIO environment variable with your Focoos API key
 
 ```bash
-pip install .[gradio]
+pip install '.[gradio]'
 ```
 
 ```bash
@@ -71,6 +73,7 @@ python gradio/app.py
 ```
 
 ## Local Inference
+
 ```python
 from focoos import Focoos
 
diff --git a/focoos/config.py b/focoos/config.py
index ef84e0b..7549626 100644
--- a/focoos/config.py
+++ b/focoos/config.py
@@ -1,12 +1,19 @@
+import typing
 from typing import Optional
 
 from pydantic_settings import BaseSettings
 
 from focoos.ports import PROD_API_URL, RuntimeTypes
 
+LogLevel = typing.Literal["DEBUG", "INFO", "WARNING", "ERROR", "FATAL", "CRITICAL"]
+
 
 class FocoosConfig(BaseSettings):
     focoos_api_key: Optional[str] = None
+    focoos_log_level: LogLevel = "DEBUG"
     default_host_url: str = PROD_API_URL
     runtime_type: RuntimeTypes = RuntimeTypes.ONNX_CUDA32
     warmup_iter: int = 2
+
+
+FOCOOS_CONFIG = FocoosConfig()
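The new `focoos/config.py` exposes a module-level `FOCOOS_CONFIG` singleton plus a `LogLevel` literal. Because `FocoosConfig` is a pydantic `BaseSettings` subclass, its fields can be overridden through environment variables before `focoos.config` is first imported; a minimal sketch, assuming pydantic-settings' default case-insensitive mapping and no `env_prefix` (the values shown are illustrative):

```python
import os

# Override config fields via environment variables before the singleton is built.
os.environ["FOCOOS_LOG_LEVEL"] = "INFO"  # maps to FocoosConfig.focoos_log_level
os.environ["RUNTIME_TYPE"] = "onnx_cpu"  # maps to FocoosConfig.runtime_type

from focoos.config import FOCOOS_CONFIG  # singleton instantiated at import time

print(FOCOOS_CONFIG.focoos_log_level)  # "INFO"
print(FOCOOS_CONFIG.runtime_type)      # RuntimeTypes.ONNX_CPU
```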
diff --git a/focoos/focoos.py b/focoos/focoos.py
index d10c6e5..d63df2b 100644
--- a/focoos/focoos.py
+++ b/focoos/focoos.py
@@ -3,22 +3,21 @@
 
 from tqdm import tqdm
 
-from focoos.config import FocoosConfig
+from focoos.config import FOCOOS_CONFIG
 from focoos.local_model import LocalModel
-from focoos.ports import DatasetMetadata, ModelMetadata, ModelPreview, ModelStatus
+from focoos.ports import DatasetMetadata, ModelMetadata, ModelPreview, RuntimeTypes
 from focoos.remote_model import RemoteModel
 from focoos.utils.logger import setup_logging
 from focoos.utils.system import HttpClient
 
 logger = setup_logging()
-config = FocoosConfig()
 
 
 class Focoos:
     def __init__(
         self,
-        api_key: str = config.focoos_api_key,  # type: ignore
-        host_url: str = config.default_host_url,
+        api_key: str = FOCOOS_CONFIG.focoos_api_key,  # type: ignore
+        host_url: str = FOCOOS_CONFIG.default_host_url,
     ):
         self.api_key = api_key
         if not self.api_key:
@@ -69,13 +68,12 @@ def list_focoos_models(self) -> list[ModelPreview]:
     def get_local_model(
         self,
         model_ref: str,
+        runtime_type: RuntimeTypes = FOCOOS_CONFIG.runtime_type,
     ) -> LocalModel:
         model_dir = os.path.join(self.cache_dir, model_ref)
-        if os.path.exists(os.path.join(model_dir, "model.onnx")):
-            return LocalModel(model_dir)
-        else:
+        if not os.path.exists(os.path.join(model_dir, "model.onnx")):
             self._download_model(model_ref)
-            return LocalModel(model_dir)
+        return LocalModel(model_dir, runtime_type)
 
     def get_remote_model(self, model_ref: str) -> RemoteModel:
         return RemoteModel(model_ref, self.http_client)
diff --git a/focoos/local_model.py b/focoos/local_model.py
index bfd3b69..4a106a9 100644
--- a/focoos/local_model.py
+++ b/focoos/local_model.py
@@ -7,7 +7,7 @@
 from PIL import Image
 from supervision import BoxAnnotator, Detections, LabelAnnotator, MaskAnnotator
 
-from focoos.config import FocoosConfig
+from focoos.config import FOCOOS_CONFIG
 from focoos.ports import (
     FocoosDetections,
     FocoosTask,
@@ -24,14 +24,13 @@
 )
 
 logger = get_logger(__name__)
-config = FocoosConfig()
 
 
 class LocalModel:
     def __init__(
         self,
         model_dir: Union[str, Path],
-        runtime_type: RuntimeTypes = config.runtime_type,
+        runtime_type: RuntimeTypes = FOCOOS_CONFIG.runtime_type,
     ):
         logger.debug(f"Runtime type: {runtime_type}, Loading model from {model_dir},")
         if not os.path.exists(model_dir):
@@ -46,7 +45,7 @@ def __init__(
             runtime_type,
             str(os.path.join(model_dir, "model.onnx")),
             self.metadata,
-            config.warmup_iter,
+            FOCOOS_CONFIG.warmup_iter,
         )
 
     def _read_metadata(self) -> ModelMetadata:
diff --git a/focoos/ports.py b/focoos/ports.py
index 9c05ad8..0d877e9 100644
--- a/focoos/ports.py
+++ b/focoos/ports.py
@@ -237,4 +237,4 @@ class RuntimeTypes(str, Enum):
     ONNX_TRT32 = "onnx_trt32"
     ONNX_TRT16 = "onnx_trt16"
     ONNX_CPU = "onnx_cpu"
-    COREML = "coreml"
+    ONNX_COREML = "onnx_coreml"
diff --git a/focoos/remote_model.py b/focoos/remote_model.py
index 45270ae..8c0f17a 100644
--- a/focoos/remote_model.py
+++ b/focoos/remote_model.py
@@ -5,35 +5,20 @@
 from typing import Optional, Tuple, Union
 
 import numpy as np
-from PIL import Image
 from supervision import BoxAnnotator, Detections, LabelAnnotator, MaskAnnotator
-from tqdm import tqdm
 
-from focoos.config import FocoosConfig
 from focoos.ports import (
-    DeploymentMode,
     FocoosDet,
     FocoosDetections,
     FocoosTask,
     Hyperparameters,
-    LatencyMetrics,
     ModelMetadata,
     ModelStatus,
-    OnnxEngineOpts,
     TrainInstance,
 )
-from focoos.runtime import ONNXRuntime
 from focoos.utils.logger import get_logger
 from focoos.utils.system import HttpClient
-from focoos.utils.vision import (
-    focoos_detections_to_supervision,
-    image_loader,
-    image_preprocess,
-    scale_detections,
-    sv_to_focoos_detections,
-)
-
-config = FocoosConfig()
+from focoos.utils.vision import focoos_detections_to_supervision, image_loader
 
 
 logger = get_logger()
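With the changes above, `Focoos.get_local_model` accepts a `runtime_type` and forwards it to `LocalModel`, so the execution provider can be chosen per call instead of only through the global config. A sketch of the updated local-inference path (the model reference and image path are placeholders, and `LocalModel.infer` is assumed to mirror the remote-model API shown in the README):

```python
from focoos import Focoos
from focoos.ports import RuntimeTypes

focoos = Focoos(api_key="<YOUR-API-KEY>")

# Downloads model.onnx into the cache if missing, then loads it with the
# requested runtime; omitting runtime_type falls back to FOCOOS_CONFIG.runtime_type.
model = focoos.get_local_model("focoos_object365", runtime_type=RuntimeTypes.ONNX_CPU)

detections = model.infer("./image.jpg", threshold=0.4)
```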
diff --git a/focoos/runtime.py b/focoos/runtime.py
index 27d3983..7f9ed4f 100644
--- a/focoos/runtime.py
+++ b/focoos/runtime.py
@@ -100,6 +100,14 @@ def __init__(
         options.enable_profiling = opts.verbose
         # options.intra_op_num_threads = 1
         available_providers = ort.get_available_providers()
+        if opts.cuda and "CUDAExecutionProvider" not in available_providers:
+            self.logger.warning("CUDA ExecutionProvider not found.")
+        if opts.trt and "TensorrtExecutionProvider" not in available_providers:
+            self.logger.warning("Tensorrt ExecutionProvider not found.")
+        if opts.vino and "OpenVINOExecutionProvider" not in available_providers:
+            self.logger.warning("OpenVINO ExecutionProvider not found.")
+        if opts.coreml and "CoreMLExecutionProvider" not in available_providers:
+            self.logger.warning("CoreML ExecutionProvider not found.")
         # Set providers
         providers = []
         dtype = np.float32
@@ -160,7 +168,9 @@ def __init__(
         self.dtype = dtype
         self.binding = binding
         self.ort_sess = ort.InferenceSession(model_path, options, providers=providers)
-        self.logger.info(f"[onnxruntime] Providers:{self.ort_sess.get_providers()}")
+        self.logger.info(
+            f"[onnxruntime] Active providers:{self.ort_sess.get_providers()}"
+        )
         if self.ort_sess.get_inputs()[0].type == "tensor(uint8)":
             self.dtype = np.uint8
         else:
@@ -297,7 +307,7 @@ def get_runtime(
         )
     elif runtime_type == RuntimeTypes.ONNX_CPU:
         opts = OnnxEngineOpts(cuda=False, verbose=False, warmup_iter=warmup_iter)
-    elif runtime_type == RuntimeTypes.COREML:
+    elif runtime_type == RuntimeTypes.ONNX_COREML:
         opts = OnnxEngineOpts(
             cuda=False, verbose=False, coreml=True, warmup_iter=warmup_iter
         )
diff --git a/focoos/utils/logger.py b/focoos/utils/logger.py
index 40eb6af..b177b8a 100644
--- a/focoos/utils/logger.py
+++ b/focoos/utils/logger.py
@@ -2,6 +2,8 @@
 import logging.config
 
 from functools import cache
+from focoos.config import FOCOOS_CONFIG, LogLevel
+
 
 class ColoredFormatter(logging.Formatter):
     log_format = "[%(asctime)s][%(levelname)s][%(name)s]: %(message)s"
@@ -43,7 +45,7 @@ def format(self, record):
         "default": {
             "class": "logging.StreamHandler",
             "formatter": "color",
-            "level": "DEBUG",
+            "level": FOCOOS_CONFIG.focoos_log_level,
         },
     },
     "root": {  # Configura il logger di default (root)
@@ -53,7 +55,7 @@ def format(self, record):
     "loggers": {
         "focoos": {
             "handlers": ["default"],
-            "level": "DEBUG",
+            "level": FOCOOS_CONFIG.focoos_log_level,
             "propagate": False,
         },
         "matplotlib": {"level": "WARNING"},
@@ -63,10 +65,10 @@ def format(self, record):
 
 
 @cache
-def get_logger(name="focoos", level=logging.DEBUG):
+def get_logger(name="focoos", level: LogLevel = FOCOOS_CONFIG.focoos_log_level):
     logger = logging.getLogger(name)
     logger.setLevel(level)
-    return logging.getLogger(name)
+    return logger
 
 
 def setup_logging():
diff --git a/focoos/utils/system.py b/focoos/utils/system.py
index b1182c7..7f2e35f 100644
--- a/focoos/utils/system.py
+++ b/focoos/utils/system.py
@@ -2,18 +2,16 @@
 
 import requests
 
-from focoos.config import FocoosConfig
-
-config = FocoosConfig()
+from focoos.config import FOCOOS_CONFIG
 
 
 class HttpClient:
     def __init__(
-        self, api_key: Optional[str] = None, host_url: str = config.default_host_url
+        self,
+        api_key: str,
+        host_url: str,
     ):
-        if not api_key and not config.focoos_api_key:
-            raise ValueError("API key is required")
-        self.api_key = api_key or config.focoos_api_key
+        self.api_key = api_key
         self.host_url = host_url
 
         self.default_headers = {
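One behavioural consequence of the `HttpClient` change: the client no longer falls back to the config for credentials (and no longer raises `ValueError` itself), so code that constructed it directly must pass both arguments explicitly, while `Focoos` keeps its defaults via `FOCOOS_CONFIG`. A hypothetical direct construction:

```python
from focoos.config import FOCOOS_CONFIG
from focoos.utils.system import HttpClient

# api_key and host_url are now both required by HttpClient itself;
# handling a missing key is left to the caller (e.g. the Focoos class).
client = HttpClient(
    api_key=FOCOOS_CONFIG.focoos_api_key or "<YOUR-API-KEY>",
    host_url=FOCOOS_CONFIG.default_host_url,
)
```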
diff --git a/notebooks/playground.ipynb b/notebooks/playground.ipynb
index 5a2a41e..0d54d04 100644
--- a/notebooks/playground.ipynb
+++ b/notebooks/playground.ipynb
@@ -89,14 +89,12 @@
     "\n",
     "# os.environ[\"RUNTIME_TYPE\"] = \"onnx_trt16\"\n",
     "from focoos import Focoos, DEV_API_URL\n",
-    "from focoos.config import FocoosConfig\n",
+    "from focoos.config import FOCOOS_CONFIG\n",
     "import os\n",
     "from pprint import pprint\n",
     "from supervision import plot_image\n",
-    "\n",
-    "\n",
-    "config = FocoosConfig()\n",
-    "print(config)\n",
+    "print(FOCOOS_CONFIG)\n",
     "focoos = Focoos(\n",
     "    api_key=os.getenv(\"FOCOOS_API_KEY\"),\n",
     "    host_url=DEV_API_URL,\n",
@@ -271,7 +269,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from focoos import Focoos, DEV_API_URL, DeploymentMode\n",
+    "from focoos import Focoos, DEV_API_URL\n",
     "import os\n",
     "from pprint import pprint\n",
     "from supervision import plot_image\n",
diff --git a/pyproject.toml b/pyproject.toml
index 72d6d70..638090b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ include = ["focoos**"]
 
 [project]
 name = "focoos"
-version = "0.1.3"
+version = "0.2.0"
 description = "Focoos SDK"
 readme = "README.md"
 requires-python = ">=3.10"
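Since 0.2.0 also renames the CoreML runtime identifier, code pinned to the old enum member needs a one-line update; a migration sketch (the commented call is illustrative):

```python
from focoos.ports import RuntimeTypes

# 0.1.3 and earlier: RuntimeTypes.COREML       -> "coreml"
# 0.2.0:             RuntimeTypes.ONNX_COREML  -> "onnx_coreml"
assert RuntimeTypes.ONNX_COREML.value == "onnx_coreml"

# e.g. focoos.get_local_model("<model-ref>", runtime_type=RuntimeTypes.ONNX_COREML)
```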