From 52003f7a6d60ee6dfb07339f1e2b236b98047807 Mon Sep 17 00:00:00 2001 From: brandon Date: Thu, 12 Oct 2023 15:20:13 -0700 Subject: [PATCH 01/14] bump version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 42284c1b..b8828095 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ packages = [ {include = "**/*.py", from = "src"}, ] readme = "README.md" -version = "0.11.1" +version = "0.12.0" [tool.poetry.dependencies] # For certifi, use ">=" instead of "^" since it upgrades its "major version" every year From 7c319b1ae7e6764491a24de82bed526d0838b487 Mon Sep 17 00:00:00 2001 From: Brandon <132288221+brandon-groundlight@users.noreply.github.com> Date: Thu, 12 Oct 2023 15:37:05 -0700 Subject: [PATCH 02/14] Update UNSURE to be UNCLEAR, catch __USURE received from the service (#105) --- src/groundlight/binary_labels.py | 16 +++++++++++----- test/integration/test_groundlight.py | 2 +- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/groundlight/binary_labels.py b/src/groundlight/binary_labels.py index a0f615b4..de3216a1 100644 --- a/src/groundlight/binary_labels.py +++ b/src/groundlight/binary_labels.py @@ -15,19 +15,25 @@ class Label(str, Enum): YES = "YES" NO = "NO" - UNSURE = "UNSURE" + UNCLEAR = "UNCLEAR" -VALID_DISPLAY_LABELS = {Label.YES, Label.NO, Label.UNSURE} +VALID_DISPLAY_LABELS = {Label.YES, Label.NO, Label.UNCLEAR} class DeprecatedLabel(str, Enum): PASS = "PASS" FAIL = "FAIL" NEEDS_REVIEW = "NEEDS_REVIEW" + UNSURE = "__UNSURE" -DEPRECATED_LABEL_NAMES = {DeprecatedLabel.PASS, DeprecatedLabel.FAIL, DeprecatedLabel.NEEDS_REVIEW} +DEPRECATED_LABEL_NAMES = { + DeprecatedLabel.PASS, + DeprecatedLabel.FAIL, + DeprecatedLabel.NEEDS_REVIEW, + DeprecatedLabel.UNSURE, +} def convert_internal_label_to_display( @@ -47,8 +53,8 @@ def convert_internal_label_to_display( return Label.YES if upper in {Label.NO, DeprecatedLabel.FAIL}: return Label.NO - if upper in {Label.UNSURE, DeprecatedLabel.NEEDS_REVIEW}: - return Label.UNSURE + if upper in {Label.UNCLEAR, DeprecatedLabel.NEEDS_REVIEW, DeprecatedLabel.UNSURE}: + return Label.UNCLEAR logger.warning(f"Unrecognized internal label {label} - leaving it alone as a string.") return label diff --git a/test/integration/test_groundlight.py b/test/integration/test_groundlight.py index ee29e6b0..b1486c19 100644 --- a/test/integration/test_groundlight.py +++ b/test/integration/test_groundlight.py @@ -337,7 +337,7 @@ def test_add_label_names(gl: Groundlight, image_query_yes: ImageQuery, image_que # We may want to support something like this in the future, but not yet with pytest.raises(ValueError): - gl.add_label(iqid_yes, Label.UNSURE) + gl.add_label(iqid_yes, Label.UNCLEAR) def test_label_conversion_produces_strings(): From 7bb71b1400dc9b5ea487aa1e47b94541c606526a Mon Sep 17 00:00:00 2001 From: senorkumar Date: Thu, 12 Oct 2023 18:02:51 -0700 Subject: [PATCH 03/14] rough draft of demo --- .../building-applications/async-queries.md | 73 +++++++++++++++++++ .../building-applications.md | 2 +- src/groundlight/client.py | 7 +- 3 files changed, 75 insertions(+), 7 deletions(-) create mode 100644 docs/docs/building-applications/async-queries.md diff --git a/docs/docs/building-applications/async-queries.md b/docs/docs/building-applications/async-queries.md new file mode 100644 index 00000000..824f36bf --- /dev/null +++ b/docs/docs/building-applications/async-queries.md @@ -0,0 +1,73 @@ +# Asynchronous Queries + +Groundlight provides a simple interface for submitting 
asynchronous queries. This is useful when the thread or machine submitting an image query is not the same thread or machine that will retrieve and use the result. For example, you might have a forward-deployed robot or camera that submits image queries to Groundlight, and a separate server that retrieves the results and takes action based on them. We will refer to these two machines as the **submitting machine** and the **retrieving machine**.

## Setup Submitting Machine
On the **submitting machine**, you will need to install the Groundlight Python SDK. Then you can submit image queries asynchronously using the `ask_async` interface (read the full documentation [here](TODO put link here)). `ask_async` submits your query and returns as soon as the query is submitted. It does not wait for an answer to be available before returning, which minimizes the time your program spends interacting with Groundlight. As a result, the `ImageQuery` object `ask_async` returns lacks a `result` (the `result` field will be `None`). This is fine for this use case, since the **submitting machine** is not interested in the result. Instead, the **submitting machine** just needs to communicate the `ImageQuery.id`s to the **retrieving machine** - this might be done via a database, a message queue, or some other mechanism. For this example, we assume you are using a database and save the `ImageQuery.id` to it via `db.save(image_query.id)`.

```python notest
from groundlight import Groundlight
import cv2
from time import sleep

gl = Groundlight()
detector = gl.get_or_create_detector(name="your_detector_name", query="your_query")

cam = cv2.VideoCapture(0)  # Initialize camera (0 is the default index)

while True:  # TODO: add a way to exit this loop... not sure what makes sense here
    _, image = cam.read()  # Capture one frame from the camera
    image_query = gl.ask_async(detector=detector, image=image)  # Submit the frame to Groundlight
    db.save(image_query.id)  # Save the image_query.id to a database for the retrieving machine to use
    sleep(10)  # Sleep for 10 seconds before submitting the next query

cam.release()  # Release the camera
```

## Setup Retrieving Machine
On the **retrieving machine**, you will need to install the Groundlight Python SDK. Then you can retrieve the results of the image queries submitted by another machine using `get_image_query`. The **retrieving machine** can then use the `ImageQuery.result` to take whatever action your application requires. For this example, we assume your application looks up the next image query to process from a database via `db.get_next_image_query_id()`.

```python notest
from groundlight import Groundlight

gl = Groundlight()
detector = gl.get_or_create_detector(name="your_detector_name", query="your_query")

def process_image_query(image_query_id):
    '''
    :param image_query_id: the id of the image query to process from the database
    '''

    # retrieve the image query from Groundlight
    image_query = gl.get_image_query(id=image_query_id)
    result = image_query.result
    # take action based on the result label ('YES', 'NO', or 'UNCLEAR')
    if result.label == 'YES':
        # take action if the result is YES
        pass
    elif result.label == 'NO':
        # take action if the result is NO
        pass
    elif result.label == 'UNCLEAR':
        # take action if the result is UNCLEAR
        pass

while True:  # TODO: add a way to exit this loop... not sure what makes sense here
    image_query_id = db.get_next_image_query_id()  # get the next image query id from the database
    process_image_query(image_query_id)  # process the image query
```

## Important Considerations
When you submit an image query asynchronously, the ML prediction for your query is not instant. Attempting to retrieve the result immediately after submitting the query will therefore likely return an 'UNCLEAR' result, as Groundlight is still processing your query. Instead, if your code needs a `result` synchronously, we recommend using one of our methods with a polling mechanism, like `submit_image_query`.

```python notest
from groundlight import Groundlight
from PIL import Image

gl = Groundlight()
detector = gl.get_or_create_detector(name="your_detector_name", query="your_query")
image = Image.open("/path/to/your/image.jpg")
image_query = gl.ask_async(detector=detector, image=image)  # Submit the image to Groundlight
result = image_query.result  # This will likely be 'UNCLEAR' as Groundlight is still processing your query
```
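For comparison, here is a minimal sketch of the synchronous path using `submit_image_query` with a polling wait. The detector name, image path, and 30-second wait below are placeholder values:

```python notest
from groundlight import Groundlight
from PIL import Image

gl = Groundlight()
detector = gl.get_or_create_detector(name="your_detector_name", query="your_query")
image = Image.open("/path/to/your/image.jpg")

# submit_image_query polls until a confident answer is available or the wait time elapses,
# so the returned ImageQuery carries a usable result.
image_query = gl.submit_image_query(detector=detector, image=image, wait=30)  # wait is in seconds (placeholder value)
print(f"The answer is {image_query.result.label}")  # 'YES', 'NO', or 'UNCLEAR'
```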
# TODO: what other considerations are there?

diff --git a/docs/docs/building-applications/building-applications.md b/docs/docs/building-applications/building-applications.md
index 68a5b877..759a6773 100644
--- a/docs/docs/building-applications/building-applications.md
+++ b/docs/docs/building-applications/building-applications.md
@@ -33,7 +33,7 @@ This sample application demonstrates how to set up a Raspberry Pi-based door loc
 ### Industrial and Manufacturing Applications
 Groundlight can be used to [apply modern natural-language-based computer vision to industrial and manufacturing applications](/docs/building-applications/industrial).
-
+# TODO: update Further reading section
 ## Further Reading
 For more in-depth guides on various aspects of building applications with Groundlight, check out the following pages:
diff --git a/src/groundlight/client.py b/src/groundlight/client.py
index dc56dee1..92fba904 100644
--- a/src/groundlight/client.py
+++ b/src/groundlight/client.py
@@ -13,12 +13,7 @@
 from groundlight.binary_labels import Label, convert_display_label_to_internal, convert_internal_label_to_display
 from groundlight.config import API_TOKEN_VARIABLE_NAME, API_TOKEN_WEB_URL
 from groundlight.images import ByteStreamWrapper, parse_supported_image_types
-from groundlight.internalapi import (
-    GroundlightApiClient,
-    NotFoundError,
-    iq_is_confident,
-    sanitize_endpoint_url,
-)
+from groundlight.internalapi import GroundlightApiClient, NotFoundError, iq_is_confident, sanitize_endpoint_url
 from groundlight.optional_imports import Image, np

 logger = logging.getLogger("groundlight.sdk")

From 0a7b928c567edf94edecf1039d1f3e3084665797 Mon Sep 17 00:00:00 2001
From: Sunil Kumar
Date: Mon, 16 Oct 2023 16:48:34 -0700
Subject: [PATCH 04/14] ask_async (#102)

* initial commit
* added ask_async to submit_image_query
* Automatically reformatting code
* added ask async method
* Automatically reformatting code
* added integration tests (requires BE merge first)
* Automatically reformatting code
* satisfying mypy
* Automatically reformatting code
* fix comments
* change what type of exception test is catching
* Automatically reformatting code
* fix imports organization issue
* fix implementation - wait must be 0 not None
* Automatically reformatting code
* forgot to make wait=0 in relevant test
* feedback from PR review
* Automatically reformatting code
* ensure want_async is a serializable bool
* add description
* updated sphinx reqs to render some of
the dependencies * updated docstring for ask_async and fixed small sphinx bugs in other folk's docstrings * Tests aren't passing because I didn't update the autogenerated code to expect a new param * Revert "Tests aren't passing because I didn't update the autogenerated code to expect a new param" This reverts commit 2477fd51149f9a030de4698ddef452f840707951. * fix generated * Automatically reformatting code * fix lint * Automatically reformatting code * Revert "Automatically reformatting code" This reverts commit cb9359e90f461d0f5c22496ece0839f447201a4a. * Revert "fix generated" This reverts commit 935c03685c5fe81313818160820bb3282abf2756. * Revert "Revert "Tests aren't passing because I didn't update the autogenerated code to expect a new param"" This reverts commit 07670e328c0528d636c7bae776f09ca4b0b5f0ef. * Revert "Tests aren't passing because I didn't update the autogenerated code to expect a new param" This reverts commit 2477fd51149f9a030de4698ddef452f840707951. * Revert "updated docstring for ask_async and fixed small sphinx bugs in other folk's docstrings" This reverts commit 67e3edd3aa6dfeadff9fcb354e4ad34057c08ba2. * third time at generated docs is the charm * Automatically reformatting code * finish making tests work * Automatically reformatting code --------- Co-authored-by: Auto-format Bot --- .vscode/settings.json | 17 +- generated/docs/ImageQueriesApi.md | 4 +- generated/docs/ImageQuery.md | 2 +- generated/model.py | 4 +- .../openapi_client/api/image_queries_api.py | 5 + generated/openapi_client/model/image_query.py | 8 +- poetry.lock | 178 +++++++++++- pyproject.toml | 1 + spec/public-api.yaml | 268 +++++++++--------- sphinx_docs/conf.py | 2 +- src/groundlight/client.py | 74 ++++- test/integration/test_groundlight.py | 57 ++++ 12 files changed, 462 insertions(+), 158 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 2fa0c950..da8bb988 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -3,6 +3,17 @@ "editor.codeActionsOnSave": { "source.organizeImports": true }, - "python.analysis.extraPaths": ["./generated"], - "python.formatting.provider": "black" -} + "editor.rulers": [ + 100, + 120 + ], + "python.analysis.extraPaths": [ + "./generated" + ], + "python.formatting.provider": "black", + "[python]": { + "editor.codeActionsOnSave": { + "source.organizeImports": false + } + } +} \ No newline at end of file diff --git a/generated/docs/ImageQueriesApi.md b/generated/docs/ImageQueriesApi.md index 248933f8..6426adf8 100644 --- a/generated/docs/ImageQueriesApi.md +++ b/generated/docs/ImageQueriesApi.md @@ -207,6 +207,7 @@ with openapi_client.ApiClient(configuration) as api_client: detector_id = "detector_id_example" # str | Choose a detector by its ID. human_review = "human_review_example" # str | If set to `DEFAULT`, use the regular escalation logic (i.e., send the image query for human review if the ML model is not confident). If set to `ALWAYS`, always send the image query for human review even if the ML model is confident. If set to `NEVER`, never send the image query for human review even if the ML model is not confident. (optional) patience_time = 3.14 # float | How long to wait for a confident response. (optional) + want_async = "want_async_example" # str | If \"true\" then submitting an image query returns immediately without a result. The result will be computed asynchronously and can be retrieved later. 
(optional) body = open('@path/to/image.jpeg', 'rb') # file_type | (optional) # example passing only required values which don't have defaults set @@ -219,7 +220,7 @@ with openapi_client.ApiClient(configuration) as api_client: # example passing only required values which don't have defaults set # and optional values try: - api_response = api_instance.submit_image_query(detector_id, human_review=human_review, patience_time=patience_time, body=body) + api_response = api_instance.submit_image_query(detector_id, human_review=human_review, patience_time=patience_time, want_async=want_async, body=body) pprint(api_response) except openapi_client.ApiException as e: print("Exception when calling ImageQueriesApi->submit_image_query: %s\n" % e) @@ -233,6 +234,7 @@ Name | Type | Description | Notes **detector_id** | **str**| Choose a detector by its ID. | **human_review** | **str**| If set to `DEFAULT`, use the regular escalation logic (i.e., send the image query for human review if the ML model is not confident). If set to `ALWAYS`, always send the image query for human review even if the ML model is confident. If set to `NEVER`, never send the image query for human review even if the ML model is not confident. | [optional] **patience_time** | **float**| How long to wait for a confident response. | [optional] + **want_async** | **str**| If \"true\" then submitting an image query returns immediately without a result. The result will be computed asynchronously and can be retrieved later. | [optional] **body** | **file_type**| | [optional] ### Return type diff --git a/generated/docs/ImageQuery.md b/generated/docs/ImageQuery.md index 98d125b2..759118e5 100644 --- a/generated/docs/ImageQuery.md +++ b/generated/docs/ImageQuery.md @@ -11,7 +11,7 @@ Name | Type | Description | Notes **query** | **str** | A question about the image. | [readonly] **detector_id** | **str** | Which detector was used on this image query? | [readonly] **result_type** | **bool, date, datetime, dict, float, int, list, str, none_type** | What type of result are we returning? 
| [readonly] -**result** | **bool, date, datetime, dict, float, int, list, str, none_type** | | [readonly] +**result** | **bool, date, datetime, dict, float, int, list, str, none_type** | | [optional] [readonly] **any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/generated/model.py b/generated/model.py index dea486ca..6b62b9ae 100644 --- a/generated/model.py +++ b/generated/model.py @@ -1,6 +1,6 @@ # generated by datamodel-codegen: # filename: public-api.yaml -# timestamp: 2023-08-09T20:46:11+00:00 +# timestamp: 2023-10-16T23:29:00+00:00 from __future__ import annotations @@ -69,7 +69,7 @@ class ImageQuery(BaseModel): query: str = Field(..., description="A question about the image.") detector_id: str = Field(..., description="Which detector was used on this image query?") result_type: ResultTypeEnum = Field(..., description="What type of result are we returning?") - result: ClassificationResult + result: Optional[ClassificationResult] = None class PaginatedDetectorList(BaseModel): diff --git a/generated/openapi_client/api/image_queries_api.py b/generated/openapi_client/api/image_queries_api.py index dd760ba8..eb2ff05b 100644 --- a/generated/openapi_client/api/image_queries_api.py +++ b/generated/openapi_client/api/image_queries_api.py @@ -133,6 +133,7 @@ def __init__(self, api_client=None): "detector_id", "human_review", "patience_time", + "want_async", "body", ], "required": [ @@ -149,17 +150,20 @@ def __init__(self, api_client=None): "detector_id": (str,), "human_review": (str,), "patience_time": (float,), + "want_async": (str,), "body": (file_type,), }, "attribute_map": { "detector_id": "detector_id", "human_review": "human_review", "patience_time": "patience_time", + "want_async": "want_async", }, "location_map": { "detector_id": "query", "human_review": "query", "patience_time": "query", + "want_async": "query", "body": "body", }, "collection_format_map": {}, @@ -299,6 +303,7 @@ def submit_image_query(self, detector_id, **kwargs): Keyword Args: human_review (str): If set to `DEFAULT`, use the regular escalation logic (i.e., send the image query for human review if the ML model is not confident). If set to `ALWAYS`, always send the image query for human review even if the ML model is confident. If set to `NEVER`, never send the image query for human review even if the ML model is not confident. . [optional] patience_time (float): How long to wait for a confident response.. [optional] + want_async (str): If \"true\" then submitting an image query returns immediately without a result. The result will be computed asynchronously and can be retrieved later.. [optional] body (file_type): [optional] _return_http_data_only (bool): response data without head status code and headers. Default is True. 
diff --git a/generated/openapi_client/model/image_query.py b/generated/openapi_client/model/image_query.py index 0100e22b..b1d5deec 100644 --- a/generated/openapi_client/model/image_query.py +++ b/generated/openapi_client/model/image_query.py @@ -168,9 +168,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data( - cls, id, type, created_at, query, detector_id, result_type, result, *args, **kwargs - ): # noqa: E501 + def _from_openapi_data(cls, id, type, created_at, query, detector_id, result_type, *args, **kwargs): # noqa: E501 """ImageQuery - a model defined in OpenAPI Args: @@ -180,7 +178,6 @@ def _from_openapi_data( query (str): A question about the image. detector_id (str): Which detector was used on this image query? result_type (bool, date, datetime, dict, float, int, list, str, none_type): What type of result are we returning? - result (bool, date, datetime, dict, float, int, list, str, none_type): Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -213,6 +210,7 @@ def _from_openapi_data( Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + result (bool, date, datetime, dict, float, int, list, str, none_type): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -247,7 +245,6 @@ def _from_openapi_data( self.query = query self.detector_id = detector_id self.result_type = result_type - self.result = result for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map @@ -306,6 +303,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) + result (bool, date, datetime, dict, float, int, list, str, none_type): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) diff --git a/poetry.lock b/poetry.lock index 6d2f7e5c..748907a5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -15,6 +16,7 @@ files = [ name = "annotated-types" version = "0.5.0" description = "Reusable constraint types to use with typing.Annotated" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -29,6 +31,7 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} name = "argcomplete" version = "3.1.2" description = "Bash tab completion for argparse" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -46,6 +49,7 @@ test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] name = "astroid" version = "2.11.7" description = "An abstract syntax tree for Python with inference support." 
+category = "dev" optional = false python-versions = ">=3.6.2" files = [ @@ -64,6 +68,7 @@ wrapt = ">=1.11,<2" name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -85,6 +90,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "autodoc-pydantic" version = "2.0.1" description = "Seamlessly integrate pydantic models in your Sphinx documentation." +category = "dev" optional = false python-versions = ">=3.7.1,<4.0.0" files = [ @@ -93,7 +99,6 @@ files = [ ] [package.dependencies] -importlib-metadata = {version = ">1", markers = "python_version <= \"3.8\""} pydantic = ">=2.0,<3.0.0" pydantic-settings = ">=2.0,<3.0.0" Sphinx = ">=4.0" @@ -108,6 +113,7 @@ test = ["coverage (>=7,<8)", "pytest (>=7,<8)"] name = "babel" version = "2.13.0" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -122,6 +128,7 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "black" version = "23.3.0" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -172,6 +179,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -183,6 +191,7 @@ files = [ name = "cfgv" version = "3.3.1" description = "Validate configuration and produce human readable error messages." +category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -194,6 +203,7 @@ files = [ name = "chardet" version = "4.0.0" description = "Universal encoding detector for Python 2 and 3" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -205,6 +215,7 @@ files = [ name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -304,6 +315,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -319,6 +331,7 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
+category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -330,6 +343,7 @@ files = [ name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -405,6 +419,7 @@ toml = ["tomli"] name = "datamodel-code-generator" version = "0.22.1" description = "Datamodel Code Generator" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -437,6 +452,7 @@ http = ["httpx"] name = "dill" version = "0.3.7" description = "serialize all of Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -451,6 +467,7 @@ graph = ["objgraph (>=1.7.2)"] name = "distlib" version = "0.3.7" description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" files = [ @@ -462,6 +479,7 @@ files = [ name = "dnspython" version = "2.3.0" description = "DNS toolkit" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -482,6 +500,7 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "dnspython" version = "2.4.2" description = "DNS toolkit" +category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -501,6 +520,7 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "docutils" version = "0.18.1" description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -512,6 +532,7 @@ files = [ name = "email-validator" version = "2.0.0.post2" description = "A robust email address syntax and deliverability validation library." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -527,6 +548,7 @@ idna = ">=2.0.0" name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -541,6 +563,7 @@ test = ["pytest (>=6)"] name = "filelock" version = "3.12.2" description = "A platform independent file lock." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -556,6 +579,7 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p name = "frozendict" version = "2.3.8" description = "A simple immutable dictionary" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -602,6 +626,7 @@ files = [ name = "genson" version = "1.2.2" description = "GenSON is a powerful, user-friendly JSON Schema generator." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -612,6 +637,7 @@ files = [ name = "identify" version = "2.5.24" description = "File identification library for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -626,6 +652,7 @@ license = ["ukkonen"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -637,6 +664,7 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -648,6 +676,7 @@ files = [ name = "importlib-metadata" version = "6.7.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -668,6 +697,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "5.12.0" description = "Read resources from Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -686,6 +716,7 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec name = "inflect" version = "5.6.2" description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -701,6 +732,7 @@ testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdo name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -712,6 +744,7 @@ files = [ name = "isort" version = "5.11.5" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -729,6 +762,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -746,6 +780,7 @@ i18n = ["Babel (>=2.7)"] name = "jsonschema" version = "4.17.3" description = "An implementation of JSON Schema validation for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -769,6 +804,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-spec" version = "0.1.4" description = "JSONSchema Spec with object-oriented paths" +category = "dev" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ @@ -786,6 +822,7 @@ typing-extensions = ">=4.3.0,<5.0.0" name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -831,6 +868,7 @@ files = [ name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -856,6 +894,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -879,6 +918,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -915,6 +964,7 @@ files = [ name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -926,6 +976,7 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -937,6 +988,7 @@ files = [ name = "mypy" version = "1.4.1" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -984,6 +1036,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
+category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -995,6 +1048,7 @@ files = [ name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" +category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -1005,10 +1059,49 @@ files = [ [package.dependencies] setuptools = "*" +[[package]] +name = "numpy" +version = "1.21.1" +description = "NumPy is the fundamental package for array computing with Python." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "numpy-1.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38e8648f9449a549a7dfe8d8755a5979b45b3538520d1e735637ef28e8c2dc50"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fd7d7409fa643a91d0a05c7554dd68aa9c9bb16e186f6ccfe40d6e003156e33a"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a75b4498b1e93d8b700282dc8e655b8bd559c0904b3910b144646dbbbc03e062"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1412aa0aec3e00bc23fbb8664d76552b4efde98fb71f60737c83efbac24112f1"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e46ceaff65609b5399163de5893d8f2a82d3c77d5e56d976c8b5fb01faa6b671"}, + {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c6a2324085dd52f96498419ba95b5777e40b6bcbc20088fddb9e8cbb58885e8e"}, + {file = "numpy-1.21.1-cp37-cp37m-win32.whl", hash = "sha256:73101b2a1fef16602696d133db402a7e7586654682244344b8329cdcbbb82172"}, + {file = "numpy-1.21.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7a708a79c9a9d26904d1cca8d383bf869edf6f8e7650d85dbc77b041e8c5a0f8"}, + {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95b995d0c413f5d0428b3f880e8fe1660ff9396dcd1f9eedbc311f37b5652e16"}, + {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:635e6bd31c9fb3d475c8f44a089569070d10a9ef18ed13738b03049280281267"}, + {file = "numpy-1.21.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a3d5fb89bfe21be2ef47c0614b9c9c707b7362386c9a3ff1feae63e0267ccb6"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a326af80e86d0e9ce92bcc1e65c8ff88297de4fa14ee936cb2293d414c9ec63"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:791492091744b0fe390a6ce85cc1bf5149968ac7d5f0477288f78c89b385d9af"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0318c465786c1f63ac05d7c4dbcecd4d2d7e13f0959b01b534ea1e92202235c5"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a513bd9c1551894ee3d31369f9b07460ef223694098cf27d399513415855b68"}, + {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91c6f5fc58df1e0a3cc0c3a717bb3308ff850abdaa6d2d802573ee2b11f674a8"}, + {file = "numpy-1.21.1-cp38-cp38-win32.whl", hash = "sha256:978010b68e17150db8765355d1ccdd450f9fc916824e8c4e35ee620590e234cd"}, + {file = "numpy-1.21.1-cp38-cp38-win_amd64.whl", hash = "sha256:9749a40a5b22333467f02fe11edc98f022133ee1bfa8ab99bda5e5437b831214"}, + {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d7a4aeac3b94af92a9373d6e77b37691b86411f9745190d2c351f410ab3a791f"}, + {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:d9e7912a56108aba9b31df688a4c4f5cb0d9d3787386b87d504762b6754fbb1b"}, + {file = "numpy-1.21.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25b40b98ebdd272bc3020935427a4530b7d60dfbe1ab9381a39147834e985eac"}, + {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a92c5aea763d14ba9d6475803fc7904bda7decc2a0a68153f587ad82941fec1"}, + {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05a0f648eb28bae4bcb204e6fd14603de2908de982e761a2fc78efe0f19e96e1"}, + {file = "numpy-1.21.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f01f28075a92eede918b965e86e8f0ba7b7797a95aa8d35e1cc8821f5fc3ad6a"}, + {file = "numpy-1.21.1-cp39-cp39-win32.whl", hash = "sha256:88c0b89ad1cc24a5efbb99ff9ab5db0f9a86e9cc50240177a571fbe9c2860ac2"}, + {file = "numpy-1.21.1-cp39-cp39-win_amd64.whl", hash = "sha256:01721eefe70544d548425a07c80be8377096a54118070b8a62476866d5208e33"}, + {file = "numpy-1.21.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d4d1de6e6fb3d28781c73fbde702ac97f03d79e4ffd6598b880b2d95d62ead4"}, + {file = "numpy-1.21.1.zip", hash = "sha256:dff4af63638afcc57a3dfb9e4b26d434a7a602d225b42d746ea7fe2edf1342fd"}, +] + [[package]] name = "openapi-schema-validator" version = "0.4.4" description = "OpenAPI schema validation for Python" +category = "dev" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ @@ -1027,6 +1120,7 @@ docs = ["sphinx (>=5.3.0,<6.0.0)", "sphinx-immaterial (>=0.11.0,<0.12.0)"] name = "openapi-spec-validator" version = "0.5.7" description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" +category = "dev" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ @@ -1046,6 +1140,7 @@ typing-extensions = {version = ">=4.5.0,<5.0.0", markers = "python_version < \"3 name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1057,6 +1152,7 @@ files = [ name = "pathable" version = "0.4.3" description = "Object-oriented paths" +category = "dev" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ @@ -1068,6 +1164,7 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1079,6 +1176,7 @@ files = [ name = "pillow" version = "9.5.0" description = "Python Imaging Library (Fork)" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1158,6 +1256,7 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1169,6 +1268,7 @@ files = [ name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1187,6 +1287,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1205,6 +1306,7 @@ testing = ["pytest", "pytest-benchmark"] name = "prance" version = "0.21.8.0" description = "Resolving Swagger/OpenAPI 2.0 and 3.0.0 Parser" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1231,6 +1333,7 @@ ssv = ["swagger-spec-validator (>=2.4,<3.0)"] name = "pre-commit" version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1250,6 +1353,7 @@ virtualenv = ">=20.10.0" name = "pydantic" version = "2.4.2" description = "Data validation using Python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1270,6 +1374,7 @@ email = ["email-validator (>=2.0.0)"] name = "pydantic-core" version = "2.10.1" description = "" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1388,6 +1493,7 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" name = "pydantic-settings" version = "2.0.3" description = "Settings management using Pydantic" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1403,6 +1509,7 @@ python-dotenv = ">=0.21.0" name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1417,6 +1524,7 @@ plugins = ["importlib-metadata"] name = "pylint" version = "2.13.9" description = "python code static checker" +category = "dev" optional = false python-versions = ">=3.6.2" files = [ @@ -1441,6 +1549,7 @@ testutil = ["gitpython (>3)"] name = "pyrsistent" version = "0.19.3" description = "Persistent/Functional/Immutable data structures" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1477,6 +1586,7 @@ files = [ name = "pysnooper" version = "1.2.0" description = "A poor man's debugger for Python." +category = "dev" optional = false python-versions = "*" files = [ @@ -1491,6 +1601,7 @@ tests = ["pytest"] name = "pytest" version = "7.4.2" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1514,6 +1625,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." 
+category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1532,6 +1644,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-markdown-docs" version = "0.4.3" description = "Run markdown code fences through pytest" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -1546,6 +1659,7 @@ markdown-it-py = ">=2.2.0,<2.3.0" name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1563,6 +1677,7 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -1575,13 +1690,14 @@ six = ">=1.5" [[package]] name = "python-dotenv" -version = "0.21.1" +version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, - {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, ] [package.extras] @@ -1591,6 +1707,7 @@ cli = ["click (>=5.0)"] name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1599,6 +1716,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1606,8 +1724,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = 
"PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1624,6 +1749,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1631,6 +1757,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = 
"sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1640,6 +1767,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1661,6 +1789,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1675,6 +1804,7 @@ six = "*" name = "ruamel-yaml" version = "0.17.35" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "dev" optional = false python-versions = ">=3" files = [ @@ -1693,35 +1823,42 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "dev" optional = false python-versions = ">=3.6" files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, @@ -1732,6 +1869,7 @@ files = [ name = "ruff" version = "0.0.261" description = "An extremely fast Python linter, written in Rust." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1758,6 +1896,7 @@ files = [ name = "semver" version = "2.13.0" description = "Python helper for Semantic Versioning (http://semver.org/)" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1769,6 +1908,7 @@ files = [ name = "setuptools" version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1785,6 +1925,7 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1796,6 +1937,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" optional = false python-versions = "*" files = [ @@ -1807,6 +1949,7 @@ files = [ name = "sphinx" version = "7.2.6" description = "Python documentation generator" +category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -1842,6 +1985,7 @@ test = ["cython (>=3.0)", "filelock", "html5lib", "pytest (>=4.6)", "setuptools name = "sphinx-rtd-theme" version = "1.3.0" description = "Read the Docs theme for Sphinx" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1861,6 +2005,7 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] name = "sphinxcontrib-applehelp" version = "1.0.7" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -1879,6 +2024,7 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.5" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -1897,6 +2043,7 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.4" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -1915,6 +2062,7 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jquery" version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" +category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -1929,6 +2077,7 @@ Sphinx = ">=1.8" name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1943,6 +2092,7 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.6" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -1961,6 +2111,7 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.9" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -1979,6 +2130,7 @@ test = ["pytest"] name = "toml" version = "0.10.2" description = "Python Library for 
Tom's Obvious, Minimal Language" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1990,6 +2142,7 @@ files = [ name = "toml-sort" version = "0.23.1" description = "Toml sorting library" +category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -2004,6 +2157,7 @@ tomlkit = ">=0.11.2" name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2015,6 +2169,7 @@ files = [ name = "tomlkit" version = "0.12.1" description = "Style preserving TOML library" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2026,6 +2181,7 @@ files = [ name = "typed-ast" version = "1.5.5" description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2076,6 +2232,7 @@ files = [ name = "typer" version = "0.9.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2097,6 +2254,7 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. name = "types-requests" version = "2.31.0.6" description = "Typing stubs for requests" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2111,6 +2269,7 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.14" description = "Typing stubs for urllib3" +category = "dev" optional = false python-versions = "*" files = [ @@ -2122,6 +2281,7 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2133,6 +2293,7 @@ files = [ name = "urllib3" version = "1.26.17" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -2149,6 +2310,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "virtualenv" version = "20.24.5" description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2170,6 +2332,7 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
+category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -2254,6 +2417,7 @@ files = [ name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2268,4 +2432,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = ">=3.7.0,<4.0" -content-hash = "efa0586964555cb06603385a123ade083f101af28d45999f1015d0b8a324ffe0" +content-hash = "e8c0b961966be8c217e0d48b71ce4bfd0753bf062eef1dacf1f6508aae8d4d85" diff --git a/pyproject.toml b/pyproject.toml index b8828095..d3464015 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ Sphinx = {version = "7.2.6", python = ">=3.9.0,<4.0"} sphinx-rtd-theme = {version = "1.3.0", python = ">=3.9.0,<4.0"} autodoc-pydantic = {version = "2.0.1", python = ">=3.9.0,<4.0"} toml = "0.10.2" +pillow = "^9.0.0" [tool.poetry.scripts] groundlight = "groundlight.cli:groundlight" diff --git a/spec/public-api.yaml b/spec/public-api.yaml index 871f1d65..8dd436ea 100644 --- a/spec/public-api.yaml +++ b/spec/public-api.yaml @@ -12,105 +12,105 @@ paths: operationId: List detectors description: Retrieve a list of detectors. parameters: - - name: page - required: false - in: query - description: A page number within the paginated result set. - schema: - type: integer - - name: page_size - required: false - in: query - description: Number of results to return per page. - schema: - type: integer + - name: page + required: false + in: query + description: A page number within the paginated result set. + schema: + type: integer + - name: page_size + required: false + in: query + description: Number of results to return per page. + schema: + type: integer tags: - - detectors + - detectors security: - - ApiToken: [] + - ApiToken: [] responses: - '200': + "200": content: application/json: schema: - $ref: '#/components/schemas/PaginatedDetectorList' - description: '' + $ref: "#/components/schemas/PaginatedDetectorList" + description: "" post: operationId: Create detector description: Create a new detector. tags: - - detectors + - detectors requestBody: content: application/json: schema: - $ref: '#/components/schemas/DetectorCreationInput' + $ref: "#/components/schemas/DetectorCreationInput" application/x-www-form-urlencoded: schema: - $ref: '#/components/schemas/DetectorCreationInput' + $ref: "#/components/schemas/DetectorCreationInput" multipart/form-data: schema: - $ref: '#/components/schemas/DetectorCreationInput' + $ref: "#/components/schemas/DetectorCreationInput" required: true security: - - ApiToken: [] + - ApiToken: [] responses: - '201': + "201": content: application/json: schema: - $ref: '#/components/schemas/Detector' - description: '' + $ref: "#/components/schemas/Detector" + description: "" /v1/detectors/{id}: get: operationId: Get detector description: Retrieve a detector by its ID. parameters: - - in: path - name: id - schema: - type: string - description: Choose a detector by its ID. - required: true + - in: path + name: id + schema: + type: string + description: Choose a detector by its ID. 
+ required: true tags: - - detectors + - detectors security: - - ApiToken: [] + - ApiToken: [] responses: - '200': + "200": content: application/json: schema: - $ref: '#/components/schemas/Detector' - description: '' + $ref: "#/components/schemas/Detector" + description: "" /v1/image-queries: get: operationId: List image queries description: Retrieve a list of image-queries. parameters: - - name: page - required: false - in: query - description: A page number within the paginated result set. - schema: - type: integer - - name: page_size - required: false - in: query - description: Number of results to return per page. - schema: - type: integer + - name: page + required: false + in: query + description: A page number within the paginated result set. + schema: + type: integer + - name: page_size + required: false + in: query + description: Number of results to return per page. + schema: + type: integer tags: - - image-queries + - image-queries security: - - ApiToken: [] + - ApiToken: [] responses: - '200': + "200": content: application/json: schema: - $ref: '#/components/schemas/PaginatedImageQueryList' - description: '' + $ref: "#/components/schemas/PaginatedImageQueryList" + description: "" post: operationId: Submit image query description: |2+ @@ -126,30 +126,35 @@ paths: ``` parameters: - - in: query - name: detector_id - schema: - type: string - description: Choose a detector by its ID. - required: true - - in: query - name: human_review - schema: - type: string - description: > - If set to `DEFAULT`, use the regular escalation logic (i.e., send the image query for human review if the ML model is not confident). - If set to `ALWAYS`, always send the image query for human review even if the ML model is confident. - If set to `NEVER`, never send the image query for human review even if the ML model is not confident. - required: false - - in: query - name: patience_time - schema: - type: number - format: float - description: How long to wait for a confident response. - required: false + - in: query + name: detector_id + schema: + type: string + description: Choose a detector by its ID. + required: true + - in: query + name: human_review + schema: + type: string + description: > + If set to `DEFAULT`, use the regular escalation logic (i.e., send the image query for human review if the ML model is not confident). + If set to `ALWAYS`, always send the image query for human review even if the ML model is confident. + If set to `NEVER`, never send the image query for human review even if the ML model is not confident. + required: false + - in: query + name: patience_time + schema: + type: number + format: float + description: How long to wait for a confident response. + required: false + - in: query + name: want_async + schema: + type: string + description: If "true" then submitting an image query returns immediately without a result. The result will be computed asynchronously and can be retrieved later. tags: - - image-queries + - image-queries requestBody: content: image/jpeg: @@ -158,44 +163,45 @@ paths: format: binary examples: JPEGBinaryImageData: - value: '@path/to/image.jpeg' + value: "@path/to/image.jpeg" summary: JPEG binary image data security: - - ApiToken: [] + - ApiToken: [] responses: - '201': + "201": content: application/json: schema: - $ref: '#/components/schemas/ImageQuery' - description: '' + $ref: "#/components/schemas/ImageQuery" + description: "" /v1/image-queries/{id}: get: operationId: Get image query description: Retrieve an image-query by its ID. 
parameters: - - in: path - name: id - schema: - type: string - description: Choose an image query by its ID. - required: true + - in: path + name: id + schema: + type: string + description: Choose an image query by its ID. + required: true tags: - - image-queries + - image-queries security: - - ApiToken: [] + - ApiToken: [] responses: - '200': + "200": content: application/json: schema: - $ref: '#/components/schemas/ImageQuery' - description: '' + $ref: "#/components/schemas/ImageQuery" + description: "" components: schemas: ClassificationResult: type: object - description: Our classification result. This result can come from the detector, + description: + Our classification result. This result can come from the detector, or a human reviewer. properties: confidence: @@ -203,13 +209,14 @@ components: maximum: 1 minimum: 0 nullable: true - description: On a scale of 0 to 1, how confident are we in the predicted + description: + On a scale of 0 to 1, how confident are we in the predicted label? label: type: string description: What is the predicted label? required: - - label + - label x-internal: true Detector: type: object @@ -221,7 +228,7 @@ components: description: A unique ID for this object. type: allOf: - - $ref: '#/components/schemas/DetectorTypeEnum' + - $ref: "#/components/schemas/DetectorTypeEnum" readOnly: true description: The type of this object. created_at: @@ -247,15 +254,16 @@ components: maximum: 1.0 minimum: 0.0 default: 0.9 - description: If the detector's prediction is below this confidence threshold, + description: + If the detector's prediction is below this confidence threshold, send the image query for human review. required: - - created_at - - group_name - - id - - name - - query - - type + - created_at + - group_name + - id + - name + - query + - type x-internal: true DetectorCreationInput: type: object @@ -279,7 +287,8 @@ components: maximum: 1.0 minimum: 0.0 default: 0.9 - description: If the detector's prediction is below this confidence threshold, + description: + If the detector's prediction is below this confidence threshold, send the image query for human review. pipeline_config: type: string @@ -287,13 +296,13 @@ components: description: (Advanced usage) Configuration to instantiate a specific prediction pipeline. maxLength: 8192 required: - # TODO: make name optional - that's how the web version is going. - - name - - query + # TODO: make name optional - that's how the web version is going. + - name + - query x-internal: true DetectorTypeEnum: enum: - - detector + - detector type: string ImageQuery: type: object @@ -305,7 +314,7 @@ components: description: A unique ID for this object. type: allOf: - - $ref: '#/components/schemas/ImageQueryTypeEnum' + - $ref: "#/components/schemas/ImageQueryTypeEnum" readOnly: true description: The type of this object. created_at: @@ -323,25 +332,24 @@ components: description: Which detector was used on this image query? result_type: allOf: - - $ref: '#/components/schemas/ResultTypeEnum' + - $ref: "#/components/schemas/ResultTypeEnum" readOnly: true description: What type of result are we returning? 
result: allOf: - - $ref: '#/components/schemas/ClassificationResult' + - $ref: "#/components/schemas/ClassificationResult" readOnly: true required: - - created_at - - detector_id - - id - - query - - result - - result_type - - type + - created_at + - detector_id + - id + - query + - result_type + - type x-internal: true ImageQueryTypeEnum: enum: - - image_query + - image_query type: string PaginatedDetectorList: type: object @@ -362,7 +370,7 @@ components: results: type: array items: - $ref: '#/components/schemas/Detector' + $ref: "#/components/schemas/Detector" PaginatedImageQueryList: type: object properties: @@ -382,10 +390,10 @@ components: results: type: array items: - $ref: '#/components/schemas/ImageQuery' + $ref: "#/components/schemas/ImageQuery" ResultTypeEnum: enum: - - binary_classification + - binary_classification type: string securitySchemes: ApiToken: @@ -393,11 +401,11 @@ components: type: apiKey in: header servers: -- url: https://api.groundlight.ai/device-api - description: Prod -- url: https://api.integ.groundlight.ai/device-api - description: Integ -- url: https://device.positronix.ai/device-api - description: Device Prod -- url: https://device.integ.positronix.ai/device-api - description: Device Integ + - url: https://api.groundlight.ai/device-api + description: Prod + - url: https://api.integ.groundlight.ai/device-api + description: Integ + - url: https://device.positronix.ai/device-api + description: Device Prod + - url: https://device.integ.positronix.ai/device-api + description: Device Integ diff --git a/sphinx_docs/conf.py b/sphinx_docs/conf.py index e05c8d8b..d7aa69ee 100644 --- a/sphinx_docs/conf.py +++ b/sphinx_docs/conf.py @@ -45,4 +45,4 @@ def get_version_name() -> str: # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output html_theme = "sphinx_rtd_theme" -html_static_path = ["_static"] \ No newline at end of file +html_static_path = ["_static"] diff --git a/src/groundlight/client.py b/src/groundlight/client.py index dc56dee1..8eef7c17 100644 --- a/src/groundlight/client.py +++ b/src/groundlight/client.py @@ -13,12 +13,7 @@ from groundlight.binary_labels import Label, convert_display_label_to_internal, convert_internal_label_to_display from groundlight.config import API_TOKEN_VARIABLE_NAME, API_TOKEN_WEB_URL from groundlight.images import ByteStreamWrapper, parse_supported_image_types -from groundlight.internalapi import ( - GroundlightApiClient, - NotFoundError, - iq_is_confident, - sanitize_endpoint_url, -) +from groundlight.internalapi import GroundlightApiClient, NotFoundError, iq_is_confident, sanitize_endpoint_url from groundlight.optional_imports import Image, np logger = logging.getLogger("groundlight.sdk") @@ -107,7 +102,10 @@ def _fixup_image_query(iq: ImageQuery) -> ImageQuery: Process the wire-format image query to make it more usable. """ # Note: This might go away once we clean up the mapping logic server-side. 
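The hunk just below adds a `None` guard because a query submitted with `want_async=True` comes back without a `result`. As a rough sketch of what that means for calling code (not part of the patch; the detector id and file name are placeholders):

```python
from groundlight import Groundlight

gl = Groundlight()

# Submitted asynchronously, so no result is attached yet.
iq = gl.submit_image_query("det_12345", "frame.jpeg", wait=0, want_async=True)

if iq.result is None:
    # Fetch an updated copy of the query later to read the answer.
    iq = gl.get_image_query(id=iq.id)
```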
- iq.result.label = convert_internal_label_to_display(iq, iq.result.label) + + # we have to check that result is not None because the server will return a result of None if want_async=True + if iq.result is not None: + iq.result.label = convert_internal_label_to_display(iq, iq.result.label) return iq def get_detector(self, id: Union[str, Detector]) -> Detector: # pylint: disable=redefined-builtin @@ -285,6 +283,7 @@ def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray], wait: Optional[float] = None, human_review: Optional[str] = None, + want_async: bool = False, inspection_id: Optional[str] = None, ) -> ImageQuery: """ @@ -313,6 +312,11 @@ def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments If set to `NEVER`, never send the image query for human review. :type human_review: str + :param want_async: If True, the client will return as soon as the image query is submitted and will not wait for + an ML/human prediction. The returned `ImageQuery` will have a `result` of None. Must set `wait` to 0 to use + want_async. + :type want_async: bool + :param inspection_id: Most users will omit this. For accounts with Inspection Reports enabled, this is the ID of the inspection to associate with the image query. :type inspection_id: str @@ -336,6 +340,15 @@ def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments if human_review is not None: params["human_review"] = human_review + if want_async is True: + # If want_async is True, we don't want to wait for a result. As a result wait must be set to 0 to use + # want_async. + if wait != 0: + raise ValueError( + "wait must be set to 0 to use want_async. Using wait and want_async at the same time is incompatible." # noqa: E501 + ) + params["want_async"] = str(bool(want_async)) + # If no inspection_id is provided, we submit the image query using image_queries_api (autogenerated via OpenAPI) # However, our autogenerated code does not currently support inspection_id, so if an inspection_id was # provided, we use the private API client instead. @@ -347,11 +360,56 @@ def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments iq_id = self.api_client.submit_image_query_with_inspection(**params) image_query = self.get_image_query(iq_id) - if wait: + if wait > 0: threshold = self.get_detector(detector).confidence_threshold image_query = self.wait_for_confident_result(image_query, confidence_threshold=threshold, timeout_sec=wait) + return self._fixup_image_query(image_query) + def ask_async( + self, + detector: Union[Detector, str], + image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray], + human_review: Optional[str] = None, + inspection_id: Optional[str] = None, + ) -> ImageQuery: + """ + Convenience method for submitting an `ImageQuery` asynchronously. This is equivalent to calling + `submit_image_query` with `want_async=True` and `wait=0`. Use `get_image_query` to retrieve the `result` of the + ImageQuery. + + :param detector: the Detector object, or string id of a detector like `det_12345` + :type detector: Detector or str + + :param image: The image, in several possible formats: + + - filename (string) of a jpeg file + - byte array or BytesIO or BufferedReader with jpeg bytes + - numpy array with values 0-255 and dimensions (H,W,3) in BGR order + (Note OpenCV uses BGR not RGB. `img[:, :, ::-1]` will reverse the channels) + - PIL Image: Any binary format must be JPEG-encoded already. 
+ Any pixel format will get converted to JPEG at high quality before sending to service. + + :type image: str or bytes or Image.Image or BytesIO or BufferedReader or np.ndarray + + :param human_review: If `None` or `DEFAULT`, send the image query for human review + only if the ML prediction is not confident. + If set to `ALWAYS`, always send the image query for human review. + If set to `NEVER`, never send the image query for human review. + :type human_review: str + + :param inspection_id: Most users will omit this. For accounts with Inspection Reports enabled, + this is the ID of the inspection to associate with the image query. + :type inspection_id: str + + :return ImageQuery + :rtype ImageQuery + + """ + return self.submit_image_query( + detector, image, wait=0, human_review=human_review, want_async=True, inspection_id=inspection_id + ) + def wait_for_confident_result( self, image_query: Union[ImageQuery, str], diff --git a/test/integration/test_groundlight.py b/test/integration/test_groundlight.py index b1486c19..7697939b 100644 --- a/test/integration/test_groundlight.py +++ b/test/integration/test_groundlight.py @@ -1,6 +1,7 @@ # Optional star-imports are weird and not usually recommended ... # ruff: noqa: F403,F405 # pylint: disable=wildcard-import,unused-wildcard-import,redefined-outer-name,import-outside-toplevel +import time from datetime import datetime from typing import Any @@ -243,6 +244,62 @@ def test_submit_image_query_pil(gl: Groundlight, detector: Detector): _image_query = gl.submit_image_query(detector=detector.id, image=black) +def test_submit_image_query_wait_and_want_async_causes_exception(gl: Groundlight, detector: Detector): + """ + Tests that attempting to use the wait and want_async parameters together causes an exception. + """ + + with pytest.raises(ValueError): + _image_query = gl.submit_image_query( + detector=detector.id, image="test/assets/dog.jpeg", wait=10, want_async=True + ) + + +def test_submit_image_query_with_want_async_workflow(gl: Groundlight, detector: Detector): + """ + Tests the workflow for submitting an image query with the want_async parameter set to True. + """ + + _image_query = gl.submit_image_query(detector=detector.id, image="test/assets/dog.jpeg", wait=0, want_async=True) + + # the result should be None + assert _image_query.result is None + + # attempting to access fields within the result should raise an exception + with pytest.raises(AttributeError): + _ = _image_query.result.label # type: ignore + with pytest.raises(AttributeError): + _ = _image_query.result.confidence # type: ignore + time.sleep(5) + # you should be able to get a "real" result by retrieving an updated image query object from the server + _image_query = gl.get_image_query(id=_image_query.id) + assert _image_query.result is not None + assert _image_query.result.label in VALID_DISPLAY_LABELS + + +def test_ask_async_workflow(gl: Groundlight, detector: Detector): + """ + Tests the workflow for submitting an image query with ask_async. 
+ """ + _image_query = gl.ask_async(detector=detector.id, image="test/assets/dog.jpeg") + + # the result should be None + assert _image_query.result is None + + # attempting to access fields within the result should raise an exception + with pytest.raises(AttributeError): + _ = _image_query.result.label # type: ignore + with pytest.raises(AttributeError): + _ = _image_query.result.confidence # type: ignore + + time.sleep(5) + + # you should be able to get a "real" result by retrieving an updated image query object from the server + _image_query = gl.get_image_query(id=_image_query.id) + assert _image_query.result is not None + assert _image_query.result.label in VALID_DISPLAY_LABELS + + def test_list_image_queries(gl: Groundlight): image_queries = gl.list_image_queries() assert str(image_queries) From 59dcc763c541ad9957e66c96a6a45521015800c5 Mon Sep 17 00:00:00 2001 From: Brandon <132288221+brandon-groundlight@users.noreply.github.com> Date: Mon, 16 Oct 2023 17:13:35 -0700 Subject: [PATCH 05/14] Cli polishing (#109) * Add basic catch if api token isn't specified when cli is called * Pushes Groundlight class instantiation up until the function is actually called with arguments. This means that the entire help args are available even if we can't instantiate the class (no api key) * Fixed misunderstanding with metaprogramming, added tests * Addressing comments --------- Co-authored-by: Auto-format Bot --- src/groundlight/cli.py | 44 +++++++++++++++++++++++++++------------ src/groundlight/client.py | 10 ++------- src/groundlight/config.py | 6 ++++++ test/unit/test_cli.py | 13 ++++++++++-- 4 files changed, 50 insertions(+), 23 deletions(-) diff --git a/src/groundlight/cli.py b/src/groundlight/cli.py index 9c967fae..7ea962d5 100644 --- a/src/groundlight/cli.py +++ b/src/groundlight/cli.py @@ -5,18 +5,36 @@ from typing_extensions import get_origin from groundlight import Groundlight +from groundlight.client import ApiTokenError +from groundlight.config import API_TOKEN_HELP_MESSAGE -cli_app = typer.Typer(no_args_is_help=True, context_settings={"help_option_names": ["-h", "--help"]}) +cli_app = typer.Typer( + no_args_is_help=True, + context_settings={"help_option_names": ["-h", "--help"], "max_content_width": 800}, +) def class_func_to_cli(method): """ - Given the class method, simplify the typing on the inputs so that Typer can accept the method + Given the class method, create a method with the identical signature to provide the help documentation and + but only instantiates the class when the method is actually called. 
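A minimal, self-contained sketch of this deferred-instantiation pattern follows (the `Client`/`greet` names are invented for illustration and are not part of the SDK; it assumes Typer resolves the wrapped signature via `functools.wraps`, as the patch does):

```python
from functools import wraps

import typer

cli_app = typer.Typer()


class Client:
    """Stand-in for a class whose constructor may fail (e.g. a missing API token)."""

    def greet(self, name: str) -> str:
        return f"hello {name}"


def make_command(method_name: str):
    # Borrow the method's signature from a throwaway binding so `self` is not
    # exposed as a CLI argument, while deferring creation of the real Client.
    class _Fake:
        pass

    template = Client.__dict__[method_name].__get__(_Fake(), _Fake)

    @wraps(template)
    def wrapper(*args, **kwargs):
        client = Client()  # the real instance is built only when the command runs
        print(getattr(client, method_name)(*args, **kwargs))

    return wrapper


cli_app.command()(make_command("greet"))

if __name__ == "__main__":
    cli_app()
```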
""" - @wraps(method) + # We create a fake class and fake method so we have the correct annotations for typer to use + # When we wrap the fake method, we only use the fake method's name to access the real method + # and attach it to a Groundlight instance that we create at function call time + class FakeClass: + pass + + fake_instance = FakeClass() + fake_method = method.__get__(fake_instance, FakeClass) + + @wraps(fake_method) def wrapper(*args, **kwargs): - print(method(*args, **kwargs)) # this is where we output to the console + gl = Groundlight() + gl_method = vars(Groundlight)[fake_method.__name__] + gl_bound_method = gl_method.__get__(gl, Groundlight) + print(gl_bound_method(*args, **kwargs)) # this is where we output to the console # not recommended practice to directly change annotations, but gets around Typer not supporting Union types for name, annotation in method.__annotations__.items(): @@ -30,15 +48,15 @@ def wrapper(*args, **kwargs): def groundlight(): - gl = Groundlight() - - # For each method in the Groundlight class, create a function that can be called from the command line - for name, method in vars(Groundlight).items(): - if callable(method) and not name.startswith("_"): - attached_method = method.__get__(gl) - cli_func = class_func_to_cli(attached_method) - cli_app.command()(cli_func) - cli_app() + try: + # For each method in the Groundlight class, create a function that can be called from the command line + for name, method in vars(Groundlight).items(): + if callable(method) and not name.startswith("_"): + cli_func = class_func_to_cli(method) + cli_app.command()(cli_func) + cli_app() + except ApiTokenError: + print(API_TOKEN_HELP_MESSAGE) if __name__ == "__main__": diff --git a/src/groundlight/client.py b/src/groundlight/client.py index 8eef7c17..023468e3 100644 --- a/src/groundlight/client.py +++ b/src/groundlight/client.py @@ -11,7 +11,7 @@ from openapi_client.model.detector_creation_input import DetectorCreationInput from groundlight.binary_labels import Label, convert_display_label_to_internal, convert_internal_label_to_display -from groundlight.config import API_TOKEN_VARIABLE_NAME, API_TOKEN_WEB_URL +from groundlight.config import API_TOKEN_HELP_MESSAGE, API_TOKEN_VARIABLE_NAME from groundlight.images import ByteStreamWrapper, parse_supported_image_types from groundlight.internalapi import GroundlightApiClient, NotFoundError, iq_is_confident, sanitize_endpoint_url from groundlight.optional_imports import Image, np @@ -82,13 +82,7 @@ def __init__(self, endpoint: Optional[str] = None, api_token: Optional[str] = No # Retrieve the API token from environment variable api_token = os.environ[API_TOKEN_VARIABLE_NAME] except KeyError as e: - raise ApiTokenError( - ( - "No API token found. Please put your token in an environment variable " - f'named "{API_TOKEN_VARIABLE_NAME}". If you don\'t have a token, you can ' - f"create one at {API_TOKEN_WEB_URL}" - ), - ) from e + raise ApiTokenError(API_TOKEN_HELP_MESSAGE) from e configuration.api_key["ApiToken"] = api_token diff --git a/src/groundlight/config.py b/src/groundlight/config.py index 2eb592ff..c92acb32 100644 --- a/src/groundlight/config.py +++ b/src/groundlight/config.py @@ -8,3 +8,9 @@ "API_TOKEN_VARIABLE_NAME", "DEFAULT_ENDPOINT", ] + +API_TOKEN_HELP_MESSAGE = ( + "No API token found. Please put your token in an environment variable " + f'named "{API_TOKEN_VARIABLE_NAME}". 
If you don\'t have a token, you can ' + f"create one at {API_TOKEN_WEB_URL}" +) diff --git a/test/unit/test_cli.py b/test/unit/test_cli.py index 1558245f..9e8b99fe 100644 --- a/test/unit/test_cli.py +++ b/test/unit/test_cli.py @@ -1,6 +1,8 @@ +import os import re import subprocess from datetime import datetime +from unittest.mock import patch def test_list_detector(): @@ -72,12 +74,19 @@ def test_detector_and_image_queries(): assert completed_process.returncode == 0 +@patch.dict(os.environ, {}) def test_help(): completed_process = subprocess.run(["groundlight"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) assert completed_process.returncode == 0 - completed_process = subprocess.run(["groundlight"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + completed_process = subprocess.run(["groundlight", "-h"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) assert completed_process.returncode == 0 - completed_process = subprocess.run(["groundlight"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + completed_process = subprocess.run( + ["groundlight", "--help"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True + ) + assert completed_process.returncode == 0 + completed_process = subprocess.run( + ["groundlight", "get-detector", "-h"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True + ) assert completed_process.returncode == 0 From 9eecd107604a4bc13e766bb98953520be6e62e13 Mon Sep 17 00:00:00 2001 From: Brandon <132288221+brandon-groundlight@users.noreply.github.com> Date: Mon, 16 Oct 2023 18:31:27 -0700 Subject: [PATCH 06/14] Add ask_confident and ask_ml (#99) * Adding ask_confident and ask_fast * Automatically reformatting code * Fixing ask_ml behavior * Adding to test * Automatically reformatting code * set default wait for ask_ml * Unhide wait functions, merging logic, fixed iq_is_answered logic * Automatically reformatting code * Rewriting doc strings in Sphinx style * ask_fast to ask_ml in the tests * fixed sphinx docstring return types * Cleaning the lint trap * Last bits of lint * Making iq submission with inspection work with newly optional patience time * single char typo * Reorder functions to trick Git's LCS alg to be correct * Automatically reformatting code --------- Co-authored-by: Auto-format Bot --- src/groundlight/client.py | 230 ++++++++++++++++++++++----- src/groundlight/internalapi.py | 16 +- test/integration/test_groundlight.py | 51 +++++- 3 files changed, 253 insertions(+), 44 deletions(-) diff --git a/src/groundlight/client.py b/src/groundlight/client.py index 023468e3..9ced39dd 100644 --- a/src/groundlight/client.py +++ b/src/groundlight/client.py @@ -2,7 +2,7 @@ import os import time from io import BufferedReader, BytesIO -from typing import Optional, Union +from typing import Callable, Optional, Union from model import Detector, ImageQuery, PaginatedDetectorList, PaginatedImageQueryList from openapi_client import Configuration @@ -13,7 +13,13 @@ from groundlight.binary_labels import Label, convert_display_label_to_internal, convert_internal_label_to_display from groundlight.config import API_TOKEN_HELP_MESSAGE, API_TOKEN_VARIABLE_NAME from groundlight.images import ByteStreamWrapper, parse_supported_image_types -from groundlight.internalapi import GroundlightApiClient, NotFoundError, iq_is_confident, sanitize_endpoint_url +from groundlight.internalapi import ( + GroundlightApiClient, + NotFoundError, + iq_is_answered, + iq_is_confident, + sanitize_endpoint_url, +) from groundlight.optional_imports import 
Image, np logger = logging.getLogger("groundlight.sdk") @@ -24,7 +30,8 @@ class ApiTokenError(Exception): class Groundlight: - """Client for accessing the Groundlight cloud service. + """ + Client for accessing the Groundlight cloud service. The API token (auth) is specified through the **GROUNDLIGHT_API_TOKEN** environment variable by default. @@ -70,8 +77,8 @@ def __init__(self, endpoint: Optional[str] = None, api_token: Optional[str] = No If unset, fallback to the environment variable "GROUNDLIGHT_API_TOKEN". :type api_token: str - :return Groundlight client - :rtype Groundlight + :return: Groundlight client + :rtype: Groundlight """ # Specify the endpoint self.endpoint = sanitize_endpoint_url(endpoint) @@ -109,8 +116,8 @@ def get_detector(self, id: Union[str, Detector]) -> Detector: # pylint: disable :param id: the detector id :type id: str or Detector - :return Detector - :rtype Detector + :return: Detector + :rtype: Detector """ if isinstance(id, Detector): @@ -126,8 +133,8 @@ def get_detector_by_name(self, name: str) -> Detector: :param name: the detector name :type name: str - :return Detector - :rtype Detector + :return: Detector + :rtype: Detector """ return self.api_client._get_detector_by_name(name) # pylint: disable=protected-access @@ -141,8 +148,8 @@ def list_detectors(self, page: int = 1, page_size: int = 10) -> PaginatedDetecto :param page_size: the page size :type page_size: int - :return PaginatedDetectorList - :rtype PaginatedDetectorList + :return: PaginatedDetectorList + :rtype: PaginatedDetectorList """ obj = self.detectors_api.list_detectors(page=page, page_size=page_size) return PaginatedDetectorList.parse_obj(obj.to_dict()) @@ -170,8 +177,8 @@ def create_detector( :param pipeline_config: the pipeline config :type pipeline_config: str - :return Detector - :rtype Detector + :return: Detector + :rtype: Detector """ detector_creation_input = DetectorCreationInput(name=name, query=query) if confidence_threshold is not None: @@ -206,8 +213,8 @@ def get_or_create_detector( :param pipeline_config: the pipeline config :type pipeline_config: str - :return Detector - :rtype Detector + :return: Detector + :rtype: Detector """ try: existing_detector = self.get_detector_by_name(name) @@ -245,8 +252,8 @@ def get_image_query(self, id: str) -> ImageQuery: # pylint: disable=redefined-b :param id: the image query id :type id: str - :return ImageQuery - :rtype ImageQuery + :return: ImageQuery + :rtype: ImageQuery """ obj = self.image_queries_api.get_image_query(id=id) iq = ImageQuery.parse_obj(obj.to_dict()) @@ -262,8 +269,8 @@ def list_image_queries(self, page: int = 1, page_size: int = 10) -> PaginatedIma :param page_size: the page size :type page_size: int - :return PaginatedImageQueryList - :rtype PaginatedImageQueryList + :return: PaginatedImageQueryList + :rtype: PaginatedImageQueryList """ obj = self.image_queries_api.list_image_queries(page=page, page_size=page_size) image_queries = PaginatedImageQueryList.parse_obj(obj.to_dict()) @@ -276,6 +283,8 @@ def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments detector: Union[Detector, str], image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray], wait: Optional[float] = None, + patience_time: Optional[float] = None, + confidence_threshold: Optional[float] = None, human_review: Optional[str] = None, want_async: bool = False, inspection_id: Optional[str] = None, @@ -287,14 +296,12 @@ def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments :type detector: Detector or str 
:param image: The image, in several possible formats: - - filename (string) of a jpeg file - byte array or BytesIO or BufferedReader with jpeg bytes - numpy array with values 0-255 and dimensions (H,W,3) in BGR order (Note OpenCV uses BGR not RGB. `img[:, :, ::-1]` will reverse the channels) - PIL Image: Any binary format must be JPEG-encoded already. Any pixel format will get converted to JPEG at high quality before sending to service. - :type image: str or bytes or Image.Image or BytesIO or BufferedReader or np.ndarray :param wait: How long to wait (in seconds) for a confident answer. @@ -315,8 +322,8 @@ def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments this is the ID of the inspection to associate with the image query. :type inspection_id: str - :return ImageQuery - :rtype ImageQuery + :return: ImageQuery + :rtype: ImageQuery """ if wait is None: wait = self.DEFAULT_WAIT @@ -326,10 +333,8 @@ def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments image_bytesio: ByteStreamWrapper = parse_supported_image_types(image) params = {"detector_id": detector_id, "body": image_bytesio} - if wait == 0: - params["patience_time"] = self.DEFAULT_WAIT - else: - params["patience_time"] = wait + if patience_time is not None: + params["patience_time"] = patience_time if human_review is not None: params["human_review"] = human_review @@ -355,11 +360,89 @@ def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments image_query = self.get_image_query(iq_id) if wait > 0: - threshold = self.get_detector(detector).confidence_threshold + if confidence_threshold is None: + threshold = self.get_detector(detector).confidence_threshold + else: + threshold = confidence_threshold image_query = self.wait_for_confident_result(image_query, confidence_threshold=threshold, timeout_sec=wait) return self._fixup_image_query(image_query) + def ask_confident( + self, + detector: Union[Detector, str], + image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray], + confidence_threshold: Optional[float] = None, + wait: Optional[float] = None, + ) -> ImageQuery: + """Evaluates an image with Groundlight waiting until an answer above the confidence threshold + of the detector is reached or the wait period has passed. + :param detector: the Detector object, or string id of a detector like `det_12345` + :type detector: Detector or str + + :param image: The image, in several possible formats: + - filename (string) of a jpeg file + - byte array or BytesIO or BufferedReader with jpeg bytes + - numpy array with values 0-255 and dimensions (H,W,3) in BGR order + (Note OpenCV uses BGR not RGB. `img[:, :, ::-1]` will reverse the channels) + - PIL Image + Any binary format must be JPEG-encoded already. Any pixel format will get + converted to JPEG at high quality before sending to service. + :type image: str or bytes or Image.Image or BytesIO or BufferedReader or np.ndarray + + :param confidence_threshold: The confidence threshold to wait for. + If not set, use the detector's confidence threshold. + :type confidence_threshold: float + + :param wait: How long to wait (in seconds) for a confident answer. 
+ :type wait: float + + :return: ImageQuery + :rtype: ImageQuery + """ + return self.submit_image_query( + detector, + image, + confidence_threshold=confidence_threshold, + wait=wait, + ) + + def ask_ml( + self, + detector: Union[Detector, str], + image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray], + wait: Optional[float] = None, + ) -> ImageQuery: + """Evaluates an image with Groundlight, getting the first answer Groundlight can provide. + :param detector: the Detector object, or string id of a detector like `det_12345` + :type detector: Detector or str + + :param image: The image, in several possible formats: + - filename (string) of a jpeg file + - byte array or BytesIO or BufferedReader with jpeg bytes + - numpy array with values 0-255 and dimensions (H,W,3) in BGR order + (Note OpenCV uses BGR not RGB. `img[:, :, ::-1]` will reverse the channels) + - PIL Image + Any binary format must be JPEG-encoded already. Any pixel format will get + converted to JPEG at high quality before sending to service. + :type image: str or bytes or Image.Image or BytesIO or BufferedReader or np.ndarray + + :param wait: How long to wait (in seconds) for any answer. + :type wait: float + + :return: ImageQuery + :rtype: ImageQuery + """ + iq = self.submit_image_query( + detector, + image, + wait=0, + ) + if iq_is_answered(iq): + return iq + wait = self.DEFAULT_WAIT if wait is None else wait + return self.wait_for_ml_result(iq, timeout_sec=wait) + def ask_async( self, detector: Union[Detector, str], @@ -423,10 +506,51 @@ def wait_for_confident_result( :param timeout_sec: The maximum number of seconds to wait. :type timeout_sec: float - :return ImageQuery - :rtype ImageQuery + :return: ImageQuery + :rtype: ImageQuery + """ + + def confidence_above_thresh(iq): + return iq_is_confident(iq, confidence_threshold=confidence_threshold) + + return self._wait_for_result(image_query, condition=confidence_above_thresh, timeout_sec=timeout_sec) + + def wait_for_ml_result(self, image_query: Union[ImageQuery, str], timeout_sec: float = 30.0) -> ImageQuery: + """Waits for the first ml result to be returned. + Currently this is done by polling with an exponential back-off. + + :param image_query: An ImageQuery object to poll + :type image_query: ImageQuery or str + + :param confidence_threshold: The minimum confidence level required to return before the timeout. + :type confidence_threshold: float + + :param timeout_sec: The maximum number of seconds to wait. + :type timeout_sec: float + + :return: ImageQuery + :rtype: ImageQuery + """ + return self._wait_for_result(image_query, condition=iq_is_answered, timeout_sec=timeout_sec) + + def _wait_for_result( + self, image_query: Union[ImageQuery, str], condition: Callable, timeout_sec: float = 30.0 + ) -> ImageQuery: + """Performs polling with exponential back-off until the condition is met for the image query. + + :param image_query: An ImageQuery object to poll + :type image_query: ImageQuery or str + + :param condition: A callable that takes an ImageQuery and returns True or False + whether to keep waiting for a better result. + :type condition: Callable + + :param timeout_sec: The maximum number of seconds to wait. + :type timeout_sec: float + + :return: ImageQuery + :rtype: ImageQuery """ - # Convert from image_query_id to ImageQuery if needed. 
if isinstance(image_query, str): image_query = self.get_image_query(image_query) @@ -436,18 +560,15 @@ def wait_for_confident_result( image_query = self._fixup_image_query(image_query) while True: patience_so_far = time.time() - start_time - if iq_is_confident(image_query, confidence_threshold): - logger.debug(f"Confident answer for {image_query} after {patience_so_far:.1f}s") + if condition(image_query): + logger.debug(f"Answer for {image_query} after {patience_so_far:.1f}s") break if patience_so_far >= timeout_sec: logger.debug(f"Timeout after {timeout_sec:.0f}s waiting for {image_query}") break target_delay = min(patience_so_far + next_delay, timeout_sec) sleep_time = max(target_delay - patience_so_far, 0) - logger.debug( - f"Polling ({target_delay:.1f}/{timeout_sec:.0f}s) {image_query} until" - f" confidence>={confidence_threshold:.3f}" - ) + logger.debug(f"Polling ({target_delay:.1f}/{timeout_sec:.0f}s) {image_query} until result is available") time.sleep(sleep_time) next_delay *= self.POLLING_EXPONENTIAL_BACKOFF image_query = self.get_image_query(image_query.id) @@ -465,8 +586,8 @@ def add_label(self, image_query: Union[ImageQuery, str], label: Union[Label, str :param label: The string "YES" or the string "NO" in answer to the query. :type label: Label or str - :return None - :rtype None + :return: None + :rtype: None """ if isinstance(image_query, ImageQuery): image_query_id = image_query.id @@ -482,12 +603,27 @@ def add_label(self, image_query: Union[ImageQuery, str], label: Union[Label, str def start_inspection(self) -> str: """For users with Inspection Reports enabled only. Starts an inspection report and returns the id of the inspection. + + :return: The unique identifier of the inspection. + :rtype: str """ return self.api_client.start_inspection() def update_inspection_metadata(self, inspection_id: str, user_provided_key: str, user_provided_value: str) -> None: """For users with Inspection Reports enabled only. Add/update inspection metadata with the user_provided_key and user_provided_value. + + :param inspection_id: The unique identifier of the inspection. + :type inspection_id: str + + :param user_provided_key: the key in the key/value pair for the inspection metadata. + :type user_provided_key: str + + :param user_provided_value: the value in the key/value pair for the inspection metadata. + :type user_provided_value: str + + :return: None + :rtype: None """ self.api_client.update_inspection_metadata(inspection_id, user_provided_key, user_provided_value) @@ -495,10 +631,22 @@ def stop_inspection(self, inspection_id: str) -> str: """For users with Inspection Reports enabled only. Stops an inspection and raises an exception if the response from the server indicates that the inspection was not successfully stopped. - Returns a str with result of the inspection (either PASS or FAIL). + + :param inspection_id: The unique identifier of the inspection. + :type inspection_id: str + + :return: "PASS" or "FAIL" depending on the result of the inspection. + :rtype: str """ return self.api_client.stop_inspection(inspection_id) def update_detector_confidence_threshold(self, detector_id: str, confidence_threshold: float) -> None: - """Updates the confidence threshold of a detector given a detector_id.""" + """Updates the confidence threshold of a detector given a detector_id. + + :param detector_id: The unique identifier of the detector. 
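The `_wait_for_result` loop above polls with exponential back-off; a standalone sketch of that pattern is below (the timing constants are illustrative, not the SDK's):

```python
import time


def poll_until(fetch, done, timeout_sec: float = 30.0, initial_delay: float = 0.25, backoff: float = 2.0):
    """Call fetch() repeatedly until done(value) is True or the timeout passes."""
    start = time.time()
    delay = initial_delay
    value = fetch()
    while not done(value) and (time.time() - start) < timeout_sec:
        remaining = max(timeout_sec - (time.time() - start), 0)
        time.sleep(min(delay, remaining))
        delay *= backoff  # back off exponentially between polls
        value = fetch()
    return value
```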
+ :type detector_id: str + + :return: None + :rtype: None + """ self.api_client.update_detector_confidence_threshold(detector_id, confidence_threshold) diff --git a/src/groundlight/internalapi.py b/src/groundlight/internalapi.py index d317b30b..3d85eb37 100644 --- a/src/groundlight/internalapi.py +++ b/src/groundlight/internalapi.py @@ -71,6 +71,17 @@ def iq_is_confident(iq: ImageQuery, confidence_threshold: float) -> bool: return iq.result.confidence >= confidence_threshold +def iq_is_answered(iq: ImageQuery) -> bool: + """Returns True if the image query has a ML or human label. + Placeholder and special labels (out of domain) have confidences exactly 0.5 + """ + if iq.result.confidence is None: + # Human label + return True + placeholder_confidence = 0.5 + return iq.result.confidence > placeholder_confidence + + class InternalApiError(ApiException, RuntimeError): # TODO: We should really avoid this double inheritance since # both `ApiException` and `RuntimeError` are subclasses of @@ -232,9 +243,9 @@ def _get_detector_by_name(self, name: str) -> Detector: def submit_image_query_with_inspection( # noqa: PLR0913 # pylint: disable=too-many-arguments self, detector_id: str, - patience_time: float, body: ByteStreamWrapper, inspection_id: str, + patience_time: Optional[float] = None, human_review: str = "DEFAULT", ) -> str: """Submits an image query to the API and returns the ID of the image query. @@ -246,8 +257,9 @@ def submit_image_query_with_inspection( # noqa: PLR0913 # pylint: disable=too-m params: Dict[str, Union[str, float, bool]] = { "inspection_id": inspection_id, "predictor_id": detector_id, - "patience_time": patience_time, } + if patience_time is not None: + params["patience_time"] = float(patience_time) # In the API, 'send_notification' is used to control human_review escalation. 
This will eventually # be deprecated, but for now we need to support it in the following manner: diff --git a/test/integration/test_groundlight.py b/test/integration/test_groundlight.py index 7697939b..71e9da1e 100644 --- a/test/integration/test_groundlight.py +++ b/test/integration/test_groundlight.py @@ -9,12 +9,13 @@ import pytest from groundlight import Groundlight from groundlight.binary_labels import VALID_DISPLAY_LABELS, DeprecatedLabel, Label, convert_internal_label_to_display -from groundlight.internalapi import InternalApiError, NotFoundError +from groundlight.internalapi import InternalApiError, NotFoundError, iq_is_answered from groundlight.optional_imports import * from groundlight.status_codes import is_user_error from model import ClassificationResult, Detector, ImageQuery, PaginatedDetectorList, PaginatedImageQueryList DEFAULT_CONFIDENCE_THRESHOLD = 0.9 +IQ_IMPROVEMENT_THRESHOLD = 0.75 def is_valid_display_result(result: Any) -> bool: @@ -163,6 +164,41 @@ def test_get_detector_by_name(gl: Groundlight, detector: Detector): gl.get_detector_by_name(name="not a real name") +def test_ask_confident(gl: Groundlight, detector: Detector): + _image_query = gl.ask_confident(detector=detector.id, image="test/assets/dog.jpeg", wait=10) + assert str(_image_query) + assert isinstance(_image_query, ImageQuery) + assert is_valid_display_result(_image_query.result) + + +def test_ask_ml(gl: Groundlight, detector: Detector): + _image_query = gl.ask_ml(detector=detector.id, image="test/assets/dog.jpeg", wait=10) + assert str(_image_query) + assert isinstance(_image_query, ImageQuery) + assert is_valid_display_result(_image_query.result) + + +def test_submit_image_query(gl: Groundlight, detector: Detector): + def validate_image_query(_image_query: ImageQuery): + assert str(_image_query) + assert isinstance(_image_query, ImageQuery) + assert is_valid_display_result(_image_query.result) + + _image_query = gl.submit_image_query(detector=detector.id, image="test/assets/dog.jpeg", wait=10) + validate_image_query(_image_query) + _image_query = gl.submit_image_query(detector=detector.id, image="test/assets/dog.jpeg", wait=3) + validate_image_query(_image_query) + _image_query = gl.submit_image_query(detector=detector.id, image="test/assets/dog.jpeg", wait=10, patience_time=20) + validate_image_query(_image_query) + _image_query = gl.submit_image_query(detector=detector.id, image="test/assets/dog.jpeg", human_review="NEVER") + validate_image_query(_image_query) + _image_query = gl.submit_image_query( + detector=detector.id, image="test/assets/dog.jpeg", wait=180, confidence_threshold=0.75 + ) + validate_image_query(_image_query) + assert _image_query.result.confidence >= IQ_IMPROVEMENT_THRESHOLD + + def test_submit_image_query_blocking(gl: Groundlight, detector: Detector): _image_query = gl.submit_image_query(detector=detector.id, image="test/assets/dog.jpeg", wait=10) assert str(_image_query) @@ -489,6 +525,19 @@ def submit_noisy_image(image, label=None): ), f"The detector {detector} quality has not improved after two minutes q.v. {new_dog_query}, {new_cat_query}" +def test_ask_method_quality(gl: Groundlight, detector: Detector): + # asks for some level of quality on how fast ask_ml is and that we will get a confident result from ask_confident + fast_always_yes_iq = gl.ask_ml(detector=detector.id, image="test/assets/dog.jpeg", wait=0) + assert iq_is_answered(fast_always_yes_iq) + name = f"Test {datetime.utcnow()}" # Need a unique name + query = "Is there a dog?" 
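For reference, here is a sketch of how application code could apply the same placeholder-confidence convention that `iq_is_answered` (added earlier in this patch) relies on; the helper name is invented:

```python
def has_real_answer(iq) -> bool:
    """Mirror of the convention above: placeholder answers sit at exactly 0.5 confidence."""
    if iq.result is None:
        return False  # async submission with no result attached yet
    if iq.result.confidence is None:
        return True  # a human-provided label has no confidence score
    return iq.result.confidence > 0.5
```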
+ detector = gl.create_detector(name=name, query=query, confidence_threshold=0.8) + fast_iq = gl.ask_ml(detector=detector.id, image="test/assets/dog.jpeg", wait=0) + assert iq_is_answered(fast_iq) + confident_iq = gl.ask_confident(detector=detector.id, image="test/assets/dog.jpeg", wait=180) + assert confident_iq.result.confidence > IQ_IMPROVEMENT_THRESHOLD + + def test_start_inspection(gl: Groundlight): inspection_id = gl.start_inspection() From a60d888440f4de967da139488f5eef59c0177445 Mon Sep 17 00:00:00 2001 From: brandon Date: Tue, 17 Oct 2023 10:18:58 -0700 Subject: [PATCH 07/14] Linting --- src/groundlight/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/groundlight/client.py b/src/groundlight/client.py index 9ced39dd..27c1250c 100644 --- a/src/groundlight/client.py +++ b/src/groundlight/client.py @@ -278,7 +278,7 @@ def list_image_queries(self, page: int = 1, page_size: int = 10) -> PaginatedIma image_queries.results = [self._fixup_image_query(iq) for iq in image_queries.results] return image_queries - def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments + def submit_image_query( # noqa: PLR0913 # pylint: disable=too-many-arguments, too-many-locals self, detector: Union[Detector, str], image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray], From 6647bcee8a55c4ca42174bee4a2b6d4256a89af5 Mon Sep 17 00:00:00 2001 From: senorkumar Date: Tue, 17 Oct 2023 11:41:01 -0700 Subject: [PATCH 08/14] addressed self comments --- .../building-applications/async-queries.md | 30 +++++++------------ 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/docs/docs/building-applications/async-queries.md b/docs/docs/building-applications/async-queries.md index 824f36bf..a8d49998 100644 --- a/docs/docs/building-applications/async-queries.md +++ b/docs/docs/building-applications/async-queries.md @@ -31,28 +31,18 @@ from groundlight import Groundlight detector = gl.get_or_create_detector(name="your_detector_name", query="your_query") -def process_image_query(image_query_id): - ''' - : param image_query_id: the id of the image query to process from the database - ''' - - # retrieve the image query from Groundlight - image_query = gl.get_image_query(id=image_query_id) +while db.next_image_query() is not None: # TODO: this is condition is a bit weird. Maybe just while True???? + image_query_id = db.get_next_image_query_id() # get the next image query id from the database + image_query = gl.get_image_query(id=image_query_id) # retrieve the image query from Groundlight result = image_query.result - # take action based on the result - if result == 'YES': - # take action if the result is YES - pass - elif result == 'NO': - # take action if the result is NO - pass - elif result == 'UNCLEAR' - # take action if the result is UNCLEAR - pass -while True: # TODO: add a way to exit this loop... 
not sure what makes sense here - image_query_id = db.get_next_image_query_id() # get the next image query id from the database - process_image_query(image_query_id) # process the image query + # take action based on the result of the image query + if result.label == 'YES': + pass # TODO: do something based on your application + elif result.label == 'NO': + pass # TODO: do something based on your application + elif result.label == 'UNCLEAR': + pass # TODO: do something based on your application ``` ## Important Considerations From 3391b7ec3cc507cd660e8101db79fc2dc348291b Mon Sep 17 00:00:00 2001 From: senorkumar Date: Tue, 17 Oct 2023 12:03:10 -0700 Subject: [PATCH 09/14] fix ask_async docstring --- src/groundlight/client.py | 33 +++++++++++++++++++++++++++++++-- 1 file changed, 31 insertions(+), 2 deletions(-) diff --git a/src/groundlight/client.py b/src/groundlight/client.py index 07a1e110..c2c3b508 100644 --- a/src/groundlight/client.py +++ b/src/groundlight/client.py @@ -479,8 +479,37 @@ def ask_async( this is the ID of the inspection to associate with the image query. :type inspection_id: str - :return ImageQuery - :rtype ImageQuery + :return: ImageQuery + :rtype: ImageQuery + + + **Example usage**:: + + gl = Groundlight() + detector = gl.get_or_create_detector( + name="door", + query="Is the door locked?", + confidence_threshold=0.9 + ) + + image_query = gl.ask_async( + detector=detector, + image="path/to/image.jpeg") + + # the image_query will have an id for later retrieval + assert image_query.id is not None + + # Do not attempt to access the result of this query as the result for all async queries + # will be None. Your result is being computed asynchronously and will be available + # later + assert image_query.result is None + + # retrieve the result later or on another machine by calling gl.get_image_query() + # with the id of the image_query above + image_query = gl.get_image_query(image_query.id) + + # now the result will be available for your use + assert image_query.result is not None """ return self.submit_image_query( From a88d01eaee912894586c0b01074399fbfdf6b825 Mon Sep 17 00:00:00 2001 From: senorkumar Date: Tue, 17 Oct 2023 13:13:25 -0700 Subject: [PATCH 10/14] small fixes --- docs/docs/building-applications/async-queries.md | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/docs/docs/building-applications/async-queries.md b/docs/docs/building-applications/async-queries.md index a8d49998..02d28d66 100644 --- a/docs/docs/building-applications/async-queries.md +++ b/docs/docs/building-applications/async-queries.md @@ -3,7 +3,7 @@ Groundlight provides a simple interface for submitting asynchronous queries. This is useful for times in which the thread or machine submitting image queries is not the same thread or machine that will be retrieving and using the results. For example, you might have a forward deployed robot or camera that submits image queries to Groundlight, and a separate server that retrieves the results and takes action based on them. We will refer to these two machines as the **submitting machine** and the **retrieving machine**. ## Setup Submitting Machine -On the **submitting machine**, you will need to install the Groundlight Python SDK. Then you can submit image queries asynchronously using the `ask_async` interface (read the full documentation [here](TODO put link here)). `ask_async` submits your query and returns as soon as the query is submitted. 
It does not wait for an answer to be available prior to returning to minimize the time your program spends interacting with Groundlight. As a result, the `ImageQuery` object `ask_async` returns lacks a `result` (the `result` field will `None`). This is alright for this use case as the **submitting machine** is not interested in the result. Instead the **submitting machine** just needs to communicate the `ImageQuery.id`s to the **retrieving machine** - this might be done via a database, a message queue, or some other mechanism. For this example, we assume you are using a database where you save the `ImageQuery.id` to it via `db.save(image_query.id)`. +On the **submitting machine**, you will need to install the Groundlight Python SDK. Then you can submit image queries asynchronously using the `ask_async` interface (read the full documentation [here](pathname:///python-sdk/api-reference-docs/#groundlight.client.Groundlight.ask_async)). `ask_async` submits your query and returns as soon as the query is submitted. It does not wait for an answer to be available prior to returning to minimize the time your program spends interacting with Groundlight. As a result, the `ImageQuery` object `ask_async` returns lacks a `result` (the `result` field will `None`). This is acceptable for this use case as the **submitting machine** is not interested in the result. Instead, the **submitting machine** just needs to communicate the `ImageQuery.id`s to the **retrieving machine** - this might be done via a database, a message queue, or some other mechanism. For this example, we assume you are using a database where you save the `ImageQuery.id` to it via `db.save(image_query.id)`. ```python notest from groundlight import Groundlight @@ -25,14 +25,15 @@ cam.release() # Release the camera ``` ## Setup Retrieving Machine -On the **retrieving machine** you will need to install the Groundlight Python SDK. Then you can retrieve the results of the image queries submitted by another machine using `get_image_query`. The **retrieving machine** can then use the `ImageQuery.result` to take action based on the result for whatever application you are building. For this example, we assume your application looks up the next image query to process from a database via `db.get_next_image_query_id()`. +On the **retrieving machine** you will need to install the Groundlight Python SDK. Then you can retrieve the results of the image queries submitted by another machine using `get_image_query`. The **retrieving machine** can then use the `ImageQuery.result` to take action based on the result for whatever application you are building. For this example, we assume your application looks up the next image query to process from a database via `db.get_next_image_query_id()` and that this function returns `None` once all `ImageQuery`s are processed. ```python notest from groundlight import Groundlight detector = gl.get_or_create_detector(name="your_detector_name", query="your_query") -while db.next_image_query() is not None: # TODO: this is condition is a bit weird. Maybe just while True???? 
- image_query_id = db.get_next_image_query_id() # get the next image query id from the database +image_query_id = db.get_next_image_query_id() + +while image_query_id is not None: image_query = gl.get_image_query(id=image_query_id) # retrieve the image query from Groundlight result = image_query.result @@ -43,10 +44,13 @@ while db.next_image_query() is not None: # TODO: this is condition is a bit wei pass # TODO: do something based on your application elif result.label == 'UNCLEAR': pass # TODO: do something based on your application + + # update image_query_id for next iteration of the loop + image_query_id = db.get_next_image_query_id() ``` ## Important Considerations -When you submit an image query asynchronously, ML prediction on your query is not instant. So attempting to retrieve the result immediately after submitting the query will likely result in an 'UNCLEAR' result as Groundlight is still processing your query. Instead, if your code needs a `result` synchronously we recommend using one of our methods with a polling mechanism to retrieve the result, like `submit_image_query`. +When you submit an image query asynchronously, ML prediction on your query is **not** instant. So attempting to retrieve the result immediately after submitting the query will likely result in an `UNCLEAR` result as Groundlight is still processing your query. Instead, if your code needs a `result` synchronously we recommend using one of our methods with a polling mechanism to retrieve the result. You can see all of the interfaces available in the documentation [here](pathname:///python-sdk/api-reference-docs/#groundlight.client.Groundlight). ```python notest from groundlight import Groundlight @@ -60,4 +64,3 @@ result = image_query.result # This will likely be 'UNCLEAR' as Groundlight is s # TODO: what other considerations are there? - From 1df815e23865aa30de1ad2173985abfafe4ab0bc Mon Sep 17 00:00:00 2001 From: senorkumar Date: Tue, 17 Oct 2023 13:48:16 -0700 Subject: [PATCH 11/14] cleaning up a bit --- .../building-applications/1-grabbing-images.md | 4 ++++ ...h-detectors.md => 2-working-with-detectors.md} | 2 +- ...ing-confidence.md => 3-managing-confidence.md} | 3 +++ .../{handling-errors.md => 4-handling-errors.md} | 4 ++++ .../{async-queries.md => 5-async-queries.md} | 4 ++++ .../building-applications/{edge.md => 6-edge.md} | 6 +++++- .../{industrial.md => 7-industrial.md} | 4 ++++ .../building-applications.md | 15 +++++++++------ docs/docs/getting-started/getting-started.mdx | 2 +- 9 files changed, 35 insertions(+), 9 deletions(-) rename docs/docs/building-applications/{working-with-detectors.md => 2-working-with-detectors.md} (99%) rename docs/docs/building-applications/{5-managing-confidence.md => 3-managing-confidence.md} (98%) rename docs/docs/building-applications/{handling-errors.md => 4-handling-errors.md} (99%) rename docs/docs/building-applications/{async-queries.md => 5-async-queries.md} (99%) rename docs/docs/building-applications/{edge.md => 6-edge.md} (96%) rename docs/docs/building-applications/{industrial.md => 7-industrial.md} (98%) diff --git a/docs/docs/building-applications/1-grabbing-images.md b/docs/docs/building-applications/1-grabbing-images.md index 64b0bd1b..b8a34f60 100644 --- a/docs/docs/building-applications/1-grabbing-images.md +++ b/docs/docs/building-applications/1-grabbing-images.md @@ -1,3 +1,7 @@ +--- +sidebar_position: 1 +--- + # Grabbing Images Groundlight's SDK accepts images in many popular formats, including PIL, OpenCV, and numpy arrays. 
diff --git a/docs/docs/building-applications/working-with-detectors.md b/docs/docs/building-applications/2-working-with-detectors.md similarity index 99% rename from docs/docs/building-applications/working-with-detectors.md rename to docs/docs/building-applications/2-working-with-detectors.md index 0da85c34..dda90640 100644 --- a/docs/docs/building-applications/working-with-detectors.md +++ b/docs/docs/building-applications/2-working-with-detectors.md @@ -1,5 +1,5 @@ --- -sidebar_position: 3 +sidebar_position: 2 --- # Working with Detectors diff --git a/docs/docs/building-applications/5-managing-confidence.md b/docs/docs/building-applications/3-managing-confidence.md similarity index 98% rename from docs/docs/building-applications/5-managing-confidence.md rename to docs/docs/building-applications/3-managing-confidence.md index 5aa66f68..757ce8bd 100644 --- a/docs/docs/building-applications/5-managing-confidence.md +++ b/docs/docs/building-applications/3-managing-confidence.md @@ -1,3 +1,6 @@ +--- +sidebar_position: 3 +--- # Confidence Levels Groundlight gives you a simple way to control the trade-off of latency against accuracy. The longer you can wait for an answer to your image query, the better accuracy you can get. In particular, if the ML models are unsure of the best response, they will escalate the image query to more intensive analysis with more complex models and real-time human monitors as needed. Your code can easily wait for this delayed response. Either way, these new results are automatically trained into your models so your next queries will get better results faster. diff --git a/docs/docs/building-applications/handling-errors.md b/docs/docs/building-applications/4-handling-errors.md similarity index 99% rename from docs/docs/building-applications/handling-errors.md rename to docs/docs/building-applications/4-handling-errors.md index 32a6ca17..36bf8766 100644 --- a/docs/docs/building-applications/handling-errors.md +++ b/docs/docs/building-applications/4-handling-errors.md @@ -1,3 +1,7 @@ +--- +sidebar_position: 4 +--- + # Handling Server Errors When building applications with the Groundlight SDK, you may encounter server errors during API calls. This page covers how to handle such errors and build robust code that can gracefully handle exceptions. diff --git a/docs/docs/building-applications/async-queries.md b/docs/docs/building-applications/5-async-queries.md similarity index 99% rename from docs/docs/building-applications/async-queries.md rename to docs/docs/building-applications/5-async-queries.md index 02d28d66..05da2144 100644 --- a/docs/docs/building-applications/async-queries.md +++ b/docs/docs/building-applications/5-async-queries.md @@ -1,3 +1,7 @@ +--- +sidebar_position: 5 +--- + # Asynchronous Queries Groundlight provides a simple interface for submitting asynchronous queries. This is useful for times in which the thread or machine submitting image queries is not the same thread or machine that will be retrieving and using the results. For example, you might have a forward deployed robot or camera that submits image queries to Groundlight, and a separate server that retrieves the results and takes action based on them. We will refer to these two machines as the **submitting machine** and the **retrieving machine**. 
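The async-queries guide above deliberately leaves the `db` handoff abstract (`db.save(image_query.id)` on the submitting machine, `db.get_next_image_query_id()` on the retrieving machine). As one hypothetical way to satisfy that interface — SQLite is used purely for illustration, and a real two-machine deployment would need a genuinely shared store such as a hosted database or message queue — a helper could look like this:

```python notest
import sqlite3

class ImageQueryIdStore:
    """Hypothetical stand-in for the `db` object assumed by the async-queries guide."""

    def __init__(self, path: str = "image_queries.db"):
        self.conn = sqlite3.connect(path)
        self.conn.execute(
            "CREATE TABLE IF NOT EXISTS image_queries (id TEXT PRIMARY KEY, processed INTEGER DEFAULT 0)"
        )
        self.conn.commit()

    def save(self, image_query_id: str) -> None:
        # Called on the submitting machine right after gl.ask_async(...).
        self.conn.execute(
            "INSERT OR IGNORE INTO image_queries (id) VALUES (?)", (image_query_id,)
        )
        self.conn.commit()

    def get_next_image_query_id(self):
        # Called on the retrieving machine; returns None once everything has been
        # handed out, matching the guide's `while image_query_id is not None:` loop.
        row = self.conn.execute(
            "SELECT id FROM image_queries WHERE processed = 0 LIMIT 1"
        ).fetchone()
        if row is None:
            return None
        self.conn.execute("UPDATE image_queries SET processed = 1 WHERE id = ?", (row[0],))
        self.conn.commit()
        return row[0]

db = ImageQueryIdStore()
```

Marking a row as processed when it is handed out keeps `get_next_image_query_id()` returning `None` once the backlog is drained; a production version would want to mark rows only after the result has been handled, so a crash does not silently drop queries.
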
diff --git a/docs/docs/building-applications/edge.md b/docs/docs/building-applications/6-edge.md similarity index 96% rename from docs/docs/building-applications/edge.md rename to docs/docs/building-applications/6-edge.md index 3098d0e0..03f6a21f 100644 --- a/docs/docs/building-applications/edge.md +++ b/docs/docs/building-applications/6-edge.md @@ -1,4 +1,8 @@ -# Using Groundlight on the edge +--- +sidebar_position: 6 +--- + +# Using Groundlight on the Edge If your account has access to edge models, you can download and install them to your edge devices. This allows you to run your model evaluations on the edge, reducing latency, cost, network bandwidth, and energy. diff --git a/docs/docs/building-applications/industrial.md b/docs/docs/building-applications/7-industrial.md similarity index 98% rename from docs/docs/building-applications/industrial.md rename to docs/docs/building-applications/7-industrial.md index 48dd0eef..cf50d3ed 100644 --- a/docs/docs/building-applications/industrial.md +++ b/docs/docs/building-applications/7-industrial.md @@ -1,3 +1,7 @@ +--- +sidebar_position: 7 +--- + # Industrial and Manufacturing Applications Modern natural language-based computer vision is transforming industrial and manufacturing applications by enabling more intuitive interaction with automation systems. Groundlight offers cutting-edge computer vision technology that can be seamlessly integrated into various industrial processes, enhancing efficiency, productivity, and quality control. diff --git a/docs/docs/building-applications/building-applications.md b/docs/docs/building-applications/building-applications.md index 759a6773..d52b039d 100644 --- a/docs/docs/building-applications/building-applications.md +++ b/docs/docs/building-applications/building-applications.md @@ -33,15 +33,18 @@ This sample application demonstrates how to set up a Raspberry Pi-based door loc ### Industrial and Manufacturing Applications Groundlight can be used to [apply modern natural-language-based computer vision to industrial and manufacturing applications](/docs/building-applications/industrial). -# TODO: update Further reading section + ## Further Reading For more in-depth guides on various aspects of building applications with Groundlight, check out the following pages: - -- [Working with Detectors](working-with-detectors.md): Learn how to create, configure, and use detectors in your Groundlight-powered applications. -- [Using Groundlight on the edge](edge.md): Discover how to deploy Groundlight in edge computing environments for improved performance and reduced latency. -- [Handling HTTP errors](handling-errors.md): Understand how to handle and troubleshoot HTTP errors that may occur while using Groundlight. - +- **[Grabbing images](1-grabbing-images.md)**: Understand the intricacies of how to submit images from various input sources to Groundlight. +- **[Working with detectors](2-working-with-detectors.md)**: Learn how to create, configure, and use detectors in your Groundlight-powered applications. +- **[Confidence levels](3-managing-confidence.md)**: Master how to control the trade-off of latency against accuracy by configuring the desired confidence level for your detectors. +- **[Handling server errors](4-handling-errors.md)**: Understand how to handle and troubleshoot HTTP errors that may occur while using Groundlight. +- **[Asynchronous queries](5-async-queries.md)**: Groundlight makes it easy to submit asynchronous queries. Learn how to submit queries asynchronously and retrieve the results later. 
+- **[Using Groundlight on the edge](6-edge.md)**: Discover how to deploy Groundlight in edge computing environments for improved performance and reduced latency. +- **[Industrial applications](7-industrial.md)**: Learn how to apply modern natural-language-based computer vision to your industrial and manufacturing applications. + By exploring these resources and sample applications, you'll be well on your way to building powerful visual applications using Groundlight's computer vision and natural language capabilities. diff --git a/docs/docs/getting-started/getting-started.mdx b/docs/docs/getting-started/getting-started.mdx index c3a5f716..2fd6d2bd 100644 --- a/docs/docs/getting-started/getting-started.mdx +++ b/docs/docs/getting-started/getting-started.mdx @@ -18,7 +18,7 @@ _Note: The SDK is currently in "beta" phase. Interfaces are subject to change in ### How does it work? -Your images are first analyzed by machine learning (ML) models which are automatically trained on your data. If those models have high enough [confidence](docs/building-applications/managing-confidence), that's your answer. But if the models are unsure, then the images are progressively escalated to more resource-intensive analysis methods up to real-time human review. So what you get is a computer vision system that starts working right away without even needing to first gather and label a dataset. At first it will operate with high latency, because people need to review the image queries. But over time, the ML systems will learn and improve so queries come back faster with higher confidence. +Your images are first analyzed by machine learning (ML) models which are automatically trained on your data. If those models have high enough [confidence](docs/building-applications/3-managing-confidence.md), that's your answer. But if the models are unsure, then the images are progressively escalated to more resource-intensive analysis methods up to real-time human review. So what you get is a computer vision system that starts working right away without even needing to first gather and label a dataset. At first it will operate with high latency, because people need to review the image queries. But over time, the ML systems will learn and improve so queries come back faster with higher confidence. ### Escalation Technology From af83bad93233c652cd2016876ff80ff50a2aa3f7 Mon Sep 17 00:00:00 2001 From: Sunil Kumar Date: Wed, 18 Oct 2023 09:25:36 -0700 Subject: [PATCH 12/14] Update docs/docs/building-applications/5-async-queries.md Co-authored-by: blaise-muhirwa <135643310+blaise-muhirwa@users.noreply.github.com> --- docs/docs/building-applications/5-async-queries.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/building-applications/5-async-queries.md b/docs/docs/building-applications/5-async-queries.md index 05da2144..2c55b5df 100644 --- a/docs/docs/building-applications/5-async-queries.md +++ b/docs/docs/building-applications/5-async-queries.md @@ -7,7 +7,7 @@ sidebar_position: 5 Groundlight provides a simple interface for submitting asynchronous queries. This is useful for times in which the thread or machine submitting image queries is not the same thread or machine that will be retrieving and using the results. For example, you might have a forward deployed robot or camera that submits image queries to Groundlight, and a separate server that retrieves the results and takes action based on them. We will refer to these two machines as the **submitting machine** and the **retrieving machine**. 
## Setup Submitting Machine -On the **submitting machine**, you will need to install the Groundlight Python SDK. Then you can submit image queries asynchronously using the `ask_async` interface (read the full documentation [here](pathname:///python-sdk/api-reference-docs/#groundlight.client.Groundlight.ask_async)). `ask_async` submits your query and returns as soon as the query is submitted. It does not wait for an answer to be available prior to returning to minimize the time your program spends interacting with Groundlight. As a result, the `ImageQuery` object `ask_async` returns lacks a `result` (the `result` field will `None`). This is acceptable for this use case as the **submitting machine** is not interested in the result. Instead, the **submitting machine** just needs to communicate the `ImageQuery.id`s to the **retrieving machine** - this might be done via a database, a message queue, or some other mechanism. For this example, we assume you are using a database where you save the `ImageQuery.id` to it via `db.save(image_query.id)`. +On the **submitting machine**, you will need to install the Groundlight Python SDK. Then you can submit image queries asynchronously using the `ask_async` interface (read the full documentation [here](pathname:///python-sdk/api-reference-docs/#groundlight.client.Groundlight.ask_async)). `ask_async` submits your query and returns as soon as the query is submitted. It does not wait for an answer to be available prior to returning to minimize the time your program spends interacting with Groundlight. As a result, the `ImageQuery` object `ask_async` returns lacks a `result` (the `result` field will be `None`). This is acceptable for this use case as the **submitting machine** is not interested in the result. Instead, the **submitting machine** just needs to communicate the `ImageQuery.id`s to the **retrieving machine** - this might be done via a database, a message queue, or some other mechanism. For this example, we assume you are using a database where you save the `ImageQuery.id` to it via `db.save(image_query.id)`. ```python notest from groundlight import Groundlight From 5dccedf5822a90b8bec9e7e1e4e3ecc476b486bc Mon Sep 17 00:00:00 2001 From: Sunil Kumar Date: Wed, 18 Oct 2023 09:27:14 -0700 Subject: [PATCH 13/14] Update docs/docs/building-applications/5-async-queries.md Co-authored-by: robotrapta <79607467+robotrapta@users.noreply.github.com> --- docs/docs/building-applications/5-async-queries.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/building-applications/5-async-queries.md b/docs/docs/building-applications/5-async-queries.md index 2c55b5df..17d139ec 100644 --- a/docs/docs/building-applications/5-async-queries.md +++ b/docs/docs/building-applications/5-async-queries.md @@ -4,7 +4,7 @@ sidebar_position: 5 # Asynchronous Queries -Groundlight provides a simple interface for submitting asynchronous queries. This is useful for times in which the thread or machine submitting image queries is not the same thread or machine that will be retrieving and using the results. For example, you might have a forward deployed robot or camera that submits image queries to Groundlight, and a separate server that retrieves the results and takes action based on them. We will refer to these two machines as the **submitting machine** and the **retrieving machine**. +Groundlight provides a simple interface for submitting asynchronous queries. 
This is useful for times in which the thread or process or machine submitting image queries is not the same thread or machine that will be retrieving and using the results. For example, you might have a forward deployed robot or camera that submits image queries to Groundlight, and a separate server that retrieves the results and takes action based on them. We will refer to these two machines as the **submitting machine** and the **retrieving machine**. ## Setup Submitting Machine On the **submitting machine**, you will need to install the Groundlight Python SDK. Then you can submit image queries asynchronously using the `ask_async` interface (read the full documentation [here](pathname:///python-sdk/api-reference-docs/#groundlight.client.Groundlight.ask_async)). `ask_async` submits your query and returns as soon as the query is submitted. It does not wait for an answer to be available prior to returning to minimize the time your program spends interacting with Groundlight. As a result, the `ImageQuery` object `ask_async` returns lacks a `result` (the `result` field will be `None`). This is acceptable for this use case as the **submitting machine** is not interested in the result. Instead, the **submitting machine** just needs to communicate the `ImageQuery.id`s to the **retrieving machine** - this might be done via a database, a message queue, or some other mechanism. For this example, we assume you are using a database where you save the `ImageQuery.id` to it via `db.save(image_query.id)`. From 6dc91fc74c1d463ce40157126637de12d55d06b7 Mon Sep 17 00:00:00 2001 From: senorkumar Date: Wed, 18 Oct 2023 09:32:02 -0700 Subject: [PATCH 14/14] small fixes based on PR feedback from Leo and Blaise --- docs/docs/building-applications/5-async-queries.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/docs/building-applications/5-async-queries.md b/docs/docs/building-applications/5-async-queries.md index 17d139ec..d14704c2 100644 --- a/docs/docs/building-applications/5-async-queries.md +++ b/docs/docs/building-applications/5-async-queries.md @@ -18,7 +18,7 @@ detector = gl.get_or_create_detector(name="your_detector_name", query="your_quer cam = cv2.VideoCapture(0) # Initialize camera (0 is the default index) -while True: # TODO: add a way to exit this loop... not sure what makes sense here +while True: _, image = cam.read() # Capture one frame from the camera image_query = gl.ask_async(detector=detector, image=image) # Submit the frame to Groundlight db.save(image_query.id) # Save the image_query.id to a database for the retrieving machine to use @@ -54,7 +54,7 @@ while image_query_id is not None: ``` ## Important Considerations -When you submit an image query asynchronously, ML prediction on your query is **not** instant. So attempting to retrieve the result immediately after submitting the query will likely result in an `UNCLEAR` result as Groundlight is still processing your query. Instead, if your code needs a `result` synchronously we recommend using one of our methods with a polling mechanism to retrieve the result. You can see all of the interfaces available in the documentation [here](pathname:///python-sdk/api-reference-docs/#groundlight.client.Groundlight). +When you submit an image query asynchronously, ML prediction on your query is **not** instant. So attempting to retrieve the result immediately after submitting an async query will likely result in an `UNCLEAR` result as Groundlight is still processing your query. 
Instead, if your code needs a `result` synchronously we recommend using one of our methods with a polling mechanism to retrieve the result. You can see all of the interfaces available in the documentation [here](pathname:///python-sdk/api-reference-docs/#groundlight.client.Groundlight). ```python notest from groundlight import Groundlight @@ -62,9 +62,9 @@ from PIL import Image detector = gl.get_or_create_detector(name="your_detector_name", query="your_query") image = Image.open("/path/to/your/image.jpg") -image_query = gl.ask_async(detector=detector, image=image) # Submit the frame to Groundlight -result = image_query.result # This will likely be 'UNCLEAR' as Groundlight is still processing your query -``` - -# TODO: what other considerations are there? +image_query = gl.ask_async(detector=detector, image=image) # Submit async query to Groundlight +result = image_query.result # This will always be 'None' as you asked asynchronously +image_query = gl.get_image_query(id=image_query.id) # Immediately retrieve the image query from Groundlight +result = image_query.result # This will likely be 'UNCLEAR' as Groundlight is still processing your query +``` \ No newline at end of file
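
If the calling code actually needs an answer before moving on, the waiting interfaces that appear earlier in this series — for example `ask_confident`, which the test code exercises with a `wait` — are a simpler path than submitting asynchronously and retrieving immediately. The sketch below is illustrative only; the detector name, query, confidence threshold, image path, and wait time are placeholder assumptions.

```python notest
from groundlight import Groundlight
from PIL import Image

gl = Groundlight()
detector = gl.get_or_create_detector(
    name="your_detector_name",
    query="your_query",
    confidence_threshold=0.9,  # placeholder threshold
)
image = Image.open("/path/to/your/image.jpg")

# Unlike ask_async, this call waits (up to `wait` seconds) for an answer that meets
# the detector's confidence threshold before returning.
image_query = gl.ask_confident(detector=detector, image=image, wait=60)

result = image_query.result
if result is not None and result.label == "YES":
    pass  # take your application's YES action
elif result is not None and result.label == "NO":
    pass  # take your application's NO action
else:
    pass  # UNCLEAR, or no confident answer within the wait: handle as appropriate
```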