From 7136c7157ba01c016d51446c2c3c405e0a21d6ee Mon Sep 17 00:00:00 2001
From: brandon
Date: Fri, 15 Sep 2023 14:19:26 -0700
Subject: [PATCH] Working on implementing patience time and wait time

---
 src/groundlight/client.py | 52 +++++++++++++++++++++++++++++++++++++++
 src/groundlight/todo.txt  | 10 ++++++++
 2 files changed, 62 insertions(+)
 create mode 100644 src/groundlight/todo.txt

diff --git a/src/groundlight/client.py b/src/groundlight/client.py
index 65e0b97b..c34f65f1 100644
--- a/src/groundlight/client.py
+++ b/src/groundlight/client.py
@@ -43,6 +43,7 @@ class Groundlight:
     """

     DEFAULT_WAIT: float = 30.0
+    DEFAULT_PATIENCE: float = 60.0

     POLLING_INITIAL_DELAY = 0.25
     POLLING_EXPONENTIAL_BACKOFF = 1.3  # This still has the nice backoff property that the max number of requests
@@ -223,6 +224,57 @@ def submit_image_query(  # noqa: PLR0913 # pylint: disable=too-many-arguments
         image_query = self.wait_for_confident_result(image_query, confidence_threshold=threshold, timeout_sec=wait)
         return self._fixup_image_query(image_query)

+    def ask_confident(self):
+        pass
+
+    def ask_ml(self):
+        pass
+
+    def ask_async(
+        self,
+        detector: Union[Detector, str],
+        image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray],
+        patience_time: float = DEFAULT_PATIENCE,
+        human_review: Optional[str] = None,
+    ) -> ImageQuery:
+        """Sends an image to Groundlight without waiting for a response.
+        :param detector: the Detector object, or the string id of a detector like `det_12345`
+        :param image: The image, in several possible formats:
+          - filename (string) of a jpeg file
+          - byte array or BytesIO or BufferedReader with jpeg bytes
+          - numpy array with values 0-255 and dimensions (H,W,3) in BGR order
+            (Note OpenCV uses BGR not RGB. `img[:, :, ::-1]` will reverse the channels)
+          - PIL Image
+          Any binary format must be JPEG-encoded already. Any pixel format will get
+          converted to JPEG at high quality before sending to service.
+        :param patience_time: How long Groundlight will work to answer the query.
+        :param human_review: If `None` or `DEFAULT`, send the image query for human review
+          only if the ML prediction is not confident.
+          If set to `ALWAYS`, always send the image query for human review.
+          If set to `NEVER`, never send the image query for human review.
+        """
+
+        detector_id = detector.id if isinstance(detector, Detector) else detector
+
+        image_bytesio: ByteStreamWrapper = parse_supported_image_types(image)
+
+        params = {"detector_id": detector_id, "body": image_bytesio}
+        if patience_time == 0:
+            params["patience_time"] = self.DEFAULT_PATIENCE
+        else:
+            params["patience_time"] = patience_time
+
+        if human_review is not None:
+            params["human_review"] = human_review
+
+        # Submit the image query using image_queries_api (autogenerated via OpenAPI).
+        # The autogenerated client does not currently support inspection_id, so ask_async
+        # does not accept one; queries with an inspection_id go through the private API client.
+
+        raw_image_query = self.image_queries_api.submit_image_query(**params)
+        image_query = ImageQuery.parse_obj(raw_image_query.to_dict())
+        return self._fixup_image_query(image_query)
+
     def wait_for_confident_result(
         self,
         image_query: Union[ImageQuery, str],
diff --git a/src/groundlight/todo.txt b/src/groundlight/todo.txt
new file mode 100644
index 00000000..8aeeda58
--- /dev/null
+++ b/src/groundlight/todo.txt
@@ -0,0 +1,10 @@
+out of scope:
+    changing the behavior of submit_image_query - this is a breaking change that will require a lot of love and planning. I see submit_image_query as the heavy-duty utility function, a superset of the three primary functions we're introducing.
+TODO:
+    write
+        ask_ml
+        ask_confident
+    add test cases
+        ask_async
+        ask_ml
+        ask_confident
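Usage sketch for the new ask_async path, assuming the API surface shown in the patch above; the detector id, image filename, and the follow-up get_image_query call are illustrative placeholders, not part of the patch:

    from groundlight import Groundlight

    gl = Groundlight()

    # Fire-and-forget: submit the image and get an ImageQuery back immediately;
    # Groundlight keeps working on the query for up to `patience_time` seconds.
    iq = gl.ask_async(
        detector="det_abc123",   # placeholder id; a Detector object also works
        image="doorway.jpg",     # JPEG path; bytes/BytesIO/BufferedReader/PIL/numpy also accepted
        patience_time=60.0,      # how long Groundlight keeps working on the query
        human_review="NEVER",    # or None / "DEFAULT" / "ALWAYS"
    )

    # Fetch the answer later by id (get_image_query exists on the current client;
    # whether the result is ready depends on how long the query has been running).
    result = gl.get_image_query(iq.id)
    print(result.result)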