diff --git a/pinecone/core/__init__.py b/pinecone/core/__init__.py index 8b137891..e69de29b 100644 --- a/pinecone/core/__init__.py +++ b/pinecone/core/__init__.py @@ -1 +0,0 @@ - diff --git a/pinecone/core/api_action.py b/pinecone/core/api_action.py deleted file mode 100644 index 12fc74f3..00000000 --- a/pinecone/core/api_action.py +++ /dev/null @@ -1,37 +0,0 @@ -from typing import NamedTuple -from pinecone.core.api_base import BaseAPI - -__all__ = ["ActionAPI", "VersionResponse", "WhoAmIResponse"] - -from pinecone.core.utils import get_version - - -class WhoAmIResponse(NamedTuple): - username: str = "UNKNOWN" - user_label: str = "UNKNOWN" - projectname: str = "UNKNOWN" - - -class VersionResponse(NamedTuple): - server: str - client: str - - -class ActionAPI(BaseAPI): - """User related API calls.""" - - client_version = get_version() - - def whoami(self) -> WhoAmIResponse: - """Returns user information.""" - response = self.get("/actions/whoami") - return WhoAmIResponse( - username=response.get("user_name", "UNDEFINED"), - projectname=response.get("project_name", "UNDEFINED"), - user_label=response.get("user_label", "UNDEFINED"), - ) - - def version(self) -> VersionResponse: - """Returns version information.""" - response = self.get("/actions/version") - return VersionResponse(server=response.get("version", "UNKNOWN"), client=self.client_version) diff --git a/pinecone/core/api_base.py b/pinecone/core/api_base.py deleted file mode 100644 index 93a3a7ae..00000000 --- a/pinecone/core/api_base.py +++ /dev/null @@ -1,35 +0,0 @@ -import requests -from requests.exceptions import HTTPError - - -class BaseAPI: - """Base class for HTTP API calls.""" - - def __init__(self, host: str, api_key: str = None): - self.host = host - self.api_key = api_key - - @property - def headers(self): - return {"api-key": self.api_key} - - def _send_request(self, request_handler, url, **kwargs): - response = request_handler("{0}{1}".format(self.host, url), headers=self.headers, **kwargs) - try: - response.raise_for_status() - except HTTPError as e: - e.args = e.args + (response.text,) - raise e - return response.json() - - def get(self, url: str, params: dict = None): - return self._send_request(requests.get, url, params=params) - - def post(self, url: str, json: dict = None): - return self._send_request(requests.post, url, json=json) - - def patch(self, url: str, json: dict = None): - return self._send_request(requests.patch, url, json=json) - - def delete(self, url: str): - return self._send_request(requests.delete, url) diff --git a/pinecone/core/client/api/index_operations_api.py b/pinecone/core/client/api/index_operations_api.py index 5da15f3e..e1d43059 100644 --- a/pinecone/core/client/api/index_operations_api.py +++ b/pinecone/core/client/api/index_operations_api.py @@ -1,9 +1,11 @@ """ - Pinecone index operations API + Pinecone API - # noqa: E501 + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + The version of the OpenAPI document: version not set Contact: support@pinecone.io + Generated by: https://openapi-generator.tech """ @@ -24,6 +26,7 @@ from pinecone.core.client.model.create_collection_request import CreateCollectionRequest from pinecone.core.client.model.create_request import CreateRequest from pinecone.core.client.model.index_meta import IndexMeta +from pinecone.core.client.model.inline_response200 import InlineResponse200 from pinecone.core.client.model.patch_request import PatchRequest @@ -75,7 +78,7 @@ def 
__configure_index(self, index_name, **kwargs): async_req (bool): execute request asynchronously Returns: - str + IndexMeta If the method is called asynchronously, returns the request thread. """ @@ -91,7 +94,7 @@ def __configure_index(self, index_name, **kwargs): self.configure_index = _Endpoint( settings={ - "response_type": (str,), + "response_type": (IndexMeta,), "auth": ["ApiKeyAuth"], "endpoint_path": "/databases/{indexName}", "operation_id": "configure_index", @@ -137,7 +140,7 @@ def __configure_index(self, index_name, **kwargs): }, "collection_format_map": {}, }, - headers_map={"accept": ["text/plain"], "content_type": ["application/json"]}, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, callable=__configure_index, ) @@ -145,7 +148,7 @@ def __configure_index(self, index_name, **kwargs): def __create_collection(self, **kwargs): """create_collection # noqa: E501 - This operation creates a Pinecone collection from an existing index. # noqa: E501 + This operation creates a Pinecone collection. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -238,7 +241,7 @@ def __create_collection(self, **kwargs): def __create_index(self, **kwargs): """create_index # noqa: E501 - This operation creates a Pinecone index. Specify the distance metric, the dimension of vectors to be stored in the index, the numbers replicas to use, and the collection from which to create the index, if applicable. # noqa: E501 + This operation creates a Pinecone index. You can use it to specify the measure of similarity, the dimension of vectors to be stored in the index, the numbers of shards and replicas to use, and more. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -269,7 +272,7 @@ def __create_index(self, **kwargs): async_req (bool): execute request asynchronously Returns: - str + IndexMeta If the method is called asynchronously, returns the request thread. """ @@ -284,7 +287,7 @@ def __create_index(self, **kwargs): self.create_index = _Endpoint( settings={ - "response_type": (str,), + "response_type": (IndexMeta,), "auth": ["ApiKeyAuth"], "endpoint_path": "/databases", "operation_id": "create_index", @@ -323,7 +326,7 @@ def __create_index(self, **kwargs): }, "collection_format_map": {}, }, - headers_map={"accept": ["text/plain"], "content_type": ["application/json"]}, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, callable=__create_index, ) @@ -535,7 +538,7 @@ def __delete_index(self, index_name, **kwargs): def __describe_collection(self, collection_name, **kwargs): """describe_collection # noqa: E501 - Get a description of a collection, including the name, size, and status. # noqa: E501 + Get a description of a collection. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -637,7 +640,7 @@ def __describe_collection(self, collection_name, **kwargs): def __describe_index(self, index_name, **kwargs): """describe_index # noqa: E501 - Get a description of an index, including dimension, distance metric, number of replicas, and more. # noqa: E501 + Get a description of an index. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -822,7 +825,7 @@ def __list_collections(self, **kwargs): def __list_indexes(self, **kwargs): """list_indexes # noqa: E501 - This operation returns a list of the indexes in the current project. # noqa: E501 + This operation returns a list of your Pinecone indexes. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -852,7 +855,7 @@ def __list_indexes(self, **kwargs): async_req (bool): execute request asynchronously Returns: - [str] + InlineResponse200 If the method is called asynchronously, returns the request thread. """ @@ -867,7 +870,7 @@ def __list_indexes(self, **kwargs): self.list_indexes = _Endpoint( settings={ - "response_type": ([str],), + "response_type": (InlineResponse200,), "auth": ["ApiKeyAuth"], "endpoint_path": "/databases", "operation_id": "list_indexes", diff --git a/pinecone/core/client/api/vector_operations_api.py b/pinecone/core/client/api/vector_operations_api.py index 1f6d2d25..cd94404a 100644 --- a/pinecone/core/client/api/vector_operations_api.py +++ b/pinecone/core/client/api/vector_operations_api.py @@ -1,5 +1,5 @@ """ - Pinecone vector operations API + Pinecone API No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 @@ -49,9 +49,9 @@ def __init__(self, api_client=None): def __delete(self, delete_request, **kwargs): """Delete # noqa: E501 - The `Delete` operation deletes records by ID from a single namespace. # noqa: E501 + The `Delete` operation deletes vectors, by id, from a single namespace. You can delete items by their id, from a single namespace. # noqa: E501 This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True + asynchronous HTTP request, please pass async_req=True >>> thread = api.delete(delete_request, async_req=True) >>> result = thread.get() @@ -60,24 +60,24 @@ def __delete(self, delete_request, **kwargs): delete_request (DeleteRequest): Keyword Args: - _return_http_data_only (bool): Return response data without head status + _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. + will be returned without reading/decoding response data. Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. Default is True. - _host_index (int/None): Specifies the index of the server + _host_index (int/None): specifies the index of the server that we want to use. - Default is to read from the configuration. + Default is read from the configuration. 
async_req (bool): execute request asynchronously Returns: @@ -135,36 +135,36 @@ def __delete(self, delete_request, **kwargs): def __delete1(self, **kwargs): """Delete # noqa: E501 - The `Delete` operation deletes records by ID from a single namespace. # noqa: E501 + The `Delete` operation deletes vectors, by id, from a single namespace. You can delete items by their id, from a single namespace. # noqa: E501 This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True + asynchronous HTTP request, please pass async_req=True >>> thread = api.delete1(async_req=True) >>> result = thread.get() Keyword Args: - ids ([str]): Vectors to delete. [optional] - delete_all (bool): This indicates that all records in the index namespace should be deleted. [optional] Default is False. - namespace (str): The namespace to delete records from, if applicable. [optional] - _return_http_data_only (bool): Response data without head status + ids ([str]): Vectors to delete.. [optional] + delete_all (bool): This indicates that all vectors in the index namespace should be deleted.. [optional] if omitted the server will use the default value of False + namespace (str): The namespace to delete vectors from, if applicable.. [optional] + _return_http_data_only (bool): response data without head status code and headers. Default is True. - _preload_content (bool): If False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. Default is True. - _request_timeout (int/float/tuple): Timeout setting for this request. If - one number is provided, this number is the total request timeout. This argument can also + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. - _check_input_type (bool): Specifies if type checking - should be done after the data is sent to the server. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. Default is True. - _check_return_type (bool): Specifies if type checking - should be done after the data is received from the server. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. Default is True. - _host_index (int/None): Specifies the index of the server - to use. - Default is to read from the configuration. + _host_index (int/None): specifies the index of the server + that we want to use. + Default is read from the configuration. async_req (bool): execute request asynchronously Returns: @@ -236,7 +236,7 @@ def __describe_index_stats(self, describe_index_stats_request, **kwargs): The `DescribeIndexStats` operation returns statistics about the index's contents, including the vector count per namespace, the number of dimensions, and the index fullness. The index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://www.pinecone.io/docs/api/operation/describe_index/). # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, pass async_req=True + asynchronous HTTP request, please pass async_req=True >>> thread = api.describe_index_stats(describe_index_stats_request, async_req=True) >>> result = thread.get() @@ -245,24 +245,24 @@ def __describe_index_stats(self, describe_index_stats_request, **kwargs): describe_index_stats_request (DescribeIndexStatsRequest): Keyword Args: - _return_http_data_only (bool): Return head status - code or headers. Default is True. - _preload_content (bool): If False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. Default is True. - _host_index (int/None): Specifies the index of the server + _host_index (int/None): specifies the index of the server that we want to use. - Default is to read from the configuration. + Default is read from the configuration. async_req (bool): execute request asynchronously Returns: @@ -322,31 +322,31 @@ def __describe_index_stats1(self, **kwargs): The `DescribeIndexStats` operation returns statistics about the index's contents, including the vector count per namespace, the number of dimensions, and the index fullness. The index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://www.pinecone.io/docs/api/operation/describe_index/). # noqa: E501 This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True + asynchronous HTTP request, please pass async_req=True >>> thread = api.describe_index_stats1(async_req=True) >>> result = thread.get() Keyword Args: - _return_http_data_only (bool): Response data without head status + _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. + will be returned without reading/decoding response data. Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. 
+ _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. Default is True. - _host_index (int/None): Specifies the index of the server + _host_index (int/None): specifies the index of the server that we want to use. - Default is to read from the configuration. + Default is read from the configuration. async_req (bool): execute request asynchronously Returns: @@ -392,9 +392,9 @@ def __describe_index_stats1(self, **kwargs): def __fetch(self, ids, **kwargs): """Fetch # noqa: E501 - The `Fetch` operation looks up and returns vectors by ID from a single namespace. The returned vectors include the vector data and metadata. # noqa: E501 + The `Fetch` operation looks up and returns vectors, by ID, from a single namespace. The returned vectors include the vector data and/or metadata. # noqa: E501 This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True + asynchronous HTTP request, please pass async_req=True >>> thread = api.fetch(ids, async_req=True) >>> result = thread.get() @@ -404,24 +404,24 @@ def __fetch(self, ids, **kwargs): Keyword Args: namespace (str): [optional] - _return_http_data_only (bool): Response data without head status + _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. + will be returned without reading/decoding response data. Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. Default is True. - _host_index (int/None): Specifies the index of the server + _host_index (int/None): specifies the index of the server that we want to use. - Default is to read from the configuration. + Default is read from the configuration. async_req (bool): execute request asynchronously Returns: @@ -490,9 +490,9 @@ def __fetch(self, ids, **kwargs): def __query(self, query_request, **kwargs): """Query # noqa: E501 - The `Query` operation searches a namespace using a query vector. It retrieves the IDs of the most similar records in a namespace along with their similarity scores. # noqa: E501 + The `Query` operation searches a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, pass async_req=True + asynchronous HTTP request, please pass async_req=True >>> thread = api.query(query_request, async_req=True) >>> result = thread.get() @@ -501,24 +501,24 @@ def __query(self, query_request, **kwargs): query_request (QueryRequest): Keyword Args: - _return_http_data_only (bool): Return response data without head status + _return_http_data_only (bool): response data without head status code and headers. Default is True. - _preload_content (bool): If False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. Default is True. - _host_index (int/None): Specifies the index of the server + _host_index (int/None): specifies the index of the server that we want to use. - Default is to read from the configuration. + Default is read from the configuration. async_req (bool): execute request asynchronously Returns: @@ -576,9 +576,9 @@ def __query(self, query_request, **kwargs): def __update(self, update_request, **kwargs): """Update # noqa: E501 - The `Update` operation updates a vector in a namespace. If a value is included, it overwrites the previous value. If set_metadata is included, the values of the fields specified in it are added to or overwrite the previous values. # noqa: E501 + The `Update` operation updates vector in a namespace. If a value is included, it will overwrite the previous value. If a set_metadata is included, the values of the fields specified in it will be added or overwrite the previous value. # noqa: E501 This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True + asynchronous HTTP request, please pass async_req=True >>> thread = api.update(update_request, async_req=True) >>> result = thread.get() @@ -587,24 +587,24 @@ def __update(self, update_request, **kwargs): update_request (UpdateRequest): Keyword Args: - _return_http_data_only (bool): Response head status + _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. + will be returned without reading/decoding response data. Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. 
It can also be a pair (tuple) of (connection, read) timeouts. Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. Default is True. - _host_index (int/None): Specifies the index of the server + _host_index (int/None): specifies the index of the server that we want to use. - Default is to read from the configuration. + Default is read from the configuration. async_req (bool): execute request asynchronously Returns: @@ -662,9 +662,9 @@ def __update(self, update_request, **kwargs): def __upsert(self, upsert_request, **kwargs): """Upsert # noqa: E501 - The `Upsert` operation writes vectors into a namespace. If a new value is upserted for an existing vector ID, it overwrites the previous value. # noqa: E501 + The `Upsert` operation writes vectors into a namespace. If a new value is upserted for an existing vector id, it will overwrite the previous value. # noqa: E501 This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, pass async_req=True + asynchronous HTTP request, please pass async_req=True >>> thread = api.upsert(upsert_request, async_req=True) >>> result = thread.get() @@ -673,24 +673,24 @@ def __upsert(self, upsert_request, **kwargs): upsert_request (UpsertRequest): Keyword Args: - _return_http_data_only (bool): Return response data without head status + _return_http_data_only (bool): response data without head status code and headers. Default is True. _preload_content (bool): if False, the urllib3.HTTPResponse object - is returned without reading or decoding response data. + will be returned without reading/decoding response data. Default is True. - _request_timeout (int/float/tuple): The timeout setting for this request. If - one number is provided, it is the total request timeout. It can also + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. - _check_input_type (bool): Specifies if type checking - should be done on the data sent to the server. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. Default is True. - _check_return_type (bool): Specifies if type checking - should be done on the data received from the server. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. Default is True. - _host_index (int/None): Specifies the index of the server + _host_index (int/None): specifies the index of the server that we want to use. - Default is to read from the configuration. + Default is read from the configuration. 
async_req (bool): execute request asynchronously Returns: diff --git a/pinecone/core/client/exceptions.py b/pinecone/core/client/exceptions.py index 5cb822bc..ec4f9cca 100644 --- a/pinecone/core/client/exceptions.py +++ b/pinecone/core/client/exceptions.py @@ -9,10 +9,7 @@ """ -from pinecone.core.exceptions import PineconeException - - -class OpenApiException(PineconeException): +class OpenApiException(Exception): """The base exception class for all OpenAPIExceptions""" diff --git a/pinecone/core/client/model/collection_meta.py b/pinecone/core/client/model/collection_meta.py index de2f5b6b..8616a63a 100644 --- a/pinecone/core/client/model/collection_meta.py +++ b/pinecone/core/client/model/collection_meta.py @@ -92,6 +92,8 @@ def openapi_types(): "name": (str,), # noqa: E501 "size": (int,), # noqa: E501 "status": (str,), # noqa: E501 + "dimension": (int,), # noqa: E501 + "vector_count": (int,), # noqa: E501 } @cached_property @@ -102,6 +104,8 @@ def discriminator(): "name": "name", # noqa: E501 "size": "size", # noqa: E501 "status": "status", # noqa: E501 + "dimension": "dimension", # noqa: E501 + "vector_count": "vector_count", # noqa: E501 } read_only_vars = {} @@ -110,9 +114,16 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 + def _from_openapi_data(cls, name, size, status, dimension, vector_count, *args, **kwargs): # noqa: E501 """CollectionMeta - a model defined in OpenAPI + Args: + name (str): + size (int): The size of the collection in bytes. + status (str): The status of the collection. + dimension (int): The dimension of the records stored in the collection + vector_count (int): The number of records stored in the collection + Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be @@ -144,9 +155,6 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - size (int): The size of the collection in bytes.. [optional] # noqa: E501 - status (str): The status of the collection.. [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -175,6 +183,11 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.name = name + self.size = size + self.status = status + self.dimension = dimension + self.vector_count = vector_count for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map @@ -199,9 +212,16 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 ) @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 + def __init__(self, name, size, status, dimension, vector_count, *args, **kwargs): # noqa: E501 """CollectionMeta - a model defined in OpenAPI + Args: + name (str): + size (int): The size of the collection in bytes. + status (str): The status of the collection. 
+ dimension (int): The dimension of the records stored in the collection + vector_count (int): The number of records stored in the collection + Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be @@ -233,9 +253,6 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - size (int): The size of the collection in bytes.. [optional] # noqa: E501 - status (str): The status of the collection.. [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -262,6 +279,11 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.name = name + self.size = size + self.status = status + self.dimension = dimension + self.vector_count = vector_count for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map diff --git a/pinecone/core/client/model/create_collection_request.py b/pinecone/core/client/model/create_collection_request.py index 47379c0e..8ccb6823 100644 --- a/pinecone/core/client/model/create_collection_request.py +++ b/pinecone/core/client/model/create_collection_request.py @@ -113,7 +113,7 @@ def _from_openapi_data(cls, name, source, *args, **kwargs): # noqa: E501 Args: name (str): The name of the collection to be created. - source (str): The name of the source index to be used as the source for the collection. + source (str): The name of the index to be used as the source for the collection. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -205,7 +205,7 @@ def __init__(self, name, source, *args, **kwargs): # noqa: E501 Args: name (str): The name of the collection to be created. - source (str): The name of the source index to be used as the source for the collection. + source (str): The name of the index to be used as the source for the collection. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/pinecone/core/client/model/create_request.py b/pinecone/core/client/model/create_request.py index 1aa615c0..ada8180b 100644 --- a/pinecone/core/client/model/create_request.py +++ b/pinecone/core/client/model/create_request.py @@ -54,7 +54,17 @@ class CreateRequest(ModelNormal): as additional properties values. 
""" - allowed_values = {} + allowed_values = { + ("cloud",): { + "GCP": "gcp", + "AWS": "aws", + "AZURE": "azure", + }, + ("capacity_mode",): { + "SERVERLESS": "serverless", + "POD": "pod", + }, + } validations = {} @@ -91,6 +101,9 @@ def openapi_types(): return { "name": (str,), # noqa: E501 "dimension": (int,), # noqa: E501 + "region": (str,), # noqa: E501 + "cloud": (str,), # noqa: E501 + "capacity_mode": (str,), # noqa: E501 "index_type": (str,), # noqa: E501 "metric": (str,), # noqa: E501 "pods": (int,), # noqa: E501 @@ -112,6 +125,9 @@ def discriminator(): attribute_map = { "name": "name", # noqa: E501 "dimension": "dimension", # noqa: E501 + "region": "region", # noqa: E501 + "cloud": "cloud", # noqa: E501 + "capacity_mode": "capacity_mode", # noqa: E501 "index_type": "index_type", # noqa: E501 "metric": "metric", # noqa: E501 "pods": "pods", # noqa: E501 @@ -129,12 +145,15 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data(cls, name, dimension, *args, **kwargs): # noqa: E501 + def _from_openapi_data(cls, name, dimension, region, cloud, capacity_mode, *args, **kwargs): # noqa: E501 """CreateRequest - a model defined in OpenAPI Args: name (str): The name of the index to be created. The maximum length is 45 characters. dimension (int): The dimensions of the vectors to be inserted in the index + region (str): The region where you would like your index to be created + cloud (str): The public cloud where you would like your index hosted + capacity_mode (str): The capacity mode for the index. One of `serverless` or `pod`. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -206,6 +225,9 @@ def _from_openapi_data(cls, name, dimension, *args, **kwargs): # noqa: E501 self.name = name self.dimension = dimension + self.region = region + self.cloud = cloud + self.capacity_mode = capacity_mode for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map @@ -230,12 +252,15 @@ def _from_openapi_data(cls, name, dimension, *args, **kwargs): # noqa: E501 ) @convert_js_args_to_python_args - def __init__(self, name, dimension, *args, **kwargs): # noqa: E501 + def __init__(self, name, dimension, region, cloud, capacity_mode, *args, **kwargs): # noqa: E501 """CreateRequest - a model defined in OpenAPI Args: name (str): The name of the index to be created. The maximum length is 45 characters. dimension (int): The dimensions of the vectors to be inserted in the index + region (str): The region where you would like your index to be created + cloud (str): The public cloud where you would like your index hosted + capacity_mode (str): The capacity mode for the index. One of `serverless` or `pod`. 
Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -305,6 +330,9 @@ def __init__(self, name, dimension, *args, **kwargs): # noqa: E501 self.name = name self.dimension = dimension + self.region = region + self.cloud = cloud + self.capacity_mode = capacity_mode for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map diff --git a/pinecone/core/client/model/index_meta.py b/pinecone/core/client/model/index_meta.py index e2e9bd94..e3fd076e 100644 --- a/pinecone/core/client/model/index_meta.py +++ b/pinecone/core/client/model/index_meta.py @@ -32,8 +32,10 @@ def lazy_import(): from pinecone.core.client.model.index_meta_database import IndexMetaDatabase + from pinecone.core.client.model.index_meta_status import IndexMetaStatus globals()["IndexMetaDatabase"] = IndexMetaDatabase + globals()["IndexMetaStatus"] = IndexMetaStatus class IndexMeta(ModelNormal): @@ -98,6 +100,7 @@ def openapi_types(): lazy_import() return { "database": (IndexMetaDatabase,), # noqa: E501 + "status": (IndexMetaStatus,), # noqa: E501 } @cached_property @@ -106,6 +109,7 @@ def discriminator(): attribute_map = { "database": "database", # noqa: E501 + "status": "status", # noqa: E501 } read_only_vars = {} @@ -114,9 +118,13 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 + def _from_openapi_data(cls, database, status, *args, **kwargs): # noqa: E501 """IndexMeta - a model defined in OpenAPI + Args: + database (IndexMetaDatabase): + status (IndexMetaStatus): + Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be @@ -148,7 +156,6 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - database (IndexMetaDatabase): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -177,6 +184,8 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.database = database + self.status = status for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map @@ -201,9 +210,13 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 ) @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 + def __init__(self, database, status, *args, **kwargs): # noqa: E501 """IndexMeta - a model defined in OpenAPI + Args: + database (IndexMetaDatabase): + status (IndexMetaStatus): + Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be @@ -235,7 +248,6 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - database (IndexMetaDatabase): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -262,6 +274,8 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.database = database + self.status = status for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map diff --git a/pinecone/core/client/model/index_meta_database.py 
b/pinecone/core/client/model/index_meta_database.py index a969abd8..525524c4 100644 --- a/pinecone/core/client/model/index_meta_database.py +++ b/pinecone/core/client/model/index_meta_database.py @@ -30,12 +30,6 @@ from pinecone.core.client.exceptions import ApiAttributeError -def lazy_import(): - from pinecone.core.client.model.index_meta_database_status import IndexMetaDatabaseStatus - - globals()["IndexMetaDatabaseStatus"] = IndexMetaDatabaseStatus - - class IndexMetaDatabase(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -60,7 +54,12 @@ class IndexMetaDatabase(ModelNormal): as additional properties values. """ - allowed_values = {} + allowed_values = { + ("capacity_mode",): { + "SERVERLESS": "serverless", + "POD": "pod", + }, + } validations = {} @@ -70,7 +69,6 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - lazy_import() return ( bool, date, @@ -95,19 +93,18 @@ def openapi_types(): openapi_types (dict): The key is attribute name and the value is attribute type. """ - lazy_import() return { "name": (str,), # noqa: E501 - "dimensions": (str,), # noqa: E501 - "index_type": (str,), # noqa: E501 + "dimension": (str,), # noqa: E501 + "capacity_mode": (str,), # noqa: E501 "metric": (str,), # noqa: E501 + "index_type": (str,), # noqa: E501 "pods": (int,), # noqa: E501 "replicas": (int,), # noqa: E501 "shards": (int,), # noqa: E501 "pod_type": (str,), # noqa: E501 "index_config": (dict,), # noqa: E501 "metadata_config": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - "status": (IndexMetaDatabaseStatus,), # noqa: E501 } @cached_property @@ -116,16 +113,16 @@ def discriminator(): attribute_map = { "name": "name", # noqa: E501 - "dimensions": "dimensions", # noqa: E501 - "index_type": "index_type", # noqa: E501 + "dimension": "dimension", # noqa: E501 + "capacity_mode": "capacity_mode", # noqa: E501 "metric": "metric", # noqa: E501 + "index_type": "index_type", # noqa: E501 "pods": "pods", # noqa: E501 "replicas": "replicas", # noqa: E501 "shards": "shards", # noqa: E501 "pod_type": "pod_type", # noqa: E501 "index_config": "index_config", # noqa: E501 "metadata_config": "metadata_config", # noqa: E501 - "status": "status", # noqa: E501 } read_only_vars = {} @@ -134,10 +131,16 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 + def _from_openapi_data(cls, name, dimension, capacity_mode, *args, **kwargs): # noqa: E501 """IndexMetaDatabase - a model defined in OpenAPI + Args: + name (str): + dimension (str): + capacity_mode (str): + Keyword Args: + metric (str): defaults to "cosine" # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. 
@@ -168,19 +171,16 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - dimensions (str): [optional] # noqa: E501 index_type (str): [optional] # noqa: E501 - metric (str): [optional] # noqa: E501 pods (int): [optional] # noqa: E501 replicas (int): [optional] # noqa: E501 shards (int): [optional] # noqa: E501 pod_type (str): [optional] # noqa: E501 index_config (dict): [optional] # noqa: E501 metadata_config ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 - status (IndexMetaDatabaseStatus): [optional] # noqa: E501 """ + metric = kwargs.get("metric", "cosine") _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -207,6 +207,10 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.name = name + self.dimension = dimension + self.capacity_mode = capacity_mode + self.metric = metric for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map @@ -231,10 +235,16 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 ) @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 + def __init__(self, name, dimension, capacity_mode, *args, **kwargs): # noqa: E501 """IndexMetaDatabase - a model defined in OpenAPI + Args: + name (str): + dimension (str): + capacity_mode (str): + Keyword Args: + metric (str): defaults to "cosine" # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. 
@@ -265,19 +275,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - name (str): [optional] # noqa: E501 - dimensions (str): [optional] # noqa: E501 index_type (str): [optional] # noqa: E501 - metric (str): [optional] # noqa: E501 pods (int): [optional] # noqa: E501 replicas (int): [optional] # noqa: E501 shards (int): [optional] # noqa: E501 pod_type (str): [optional] # noqa: E501 index_config (dict): [optional] # noqa: E501 metadata_config ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 - status (IndexMetaDatabaseStatus): [optional] # noqa: E501 """ + metric = kwargs.get("metric", "cosine") _check_type = kwargs.pop("_check_type", True) _spec_property_naming = kwargs.pop("_spec_property_naming", False) _path_to_item = kwargs.pop("_path_to_item", ()) @@ -302,6 +309,10 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.name = name + self.dimension = dimension + self.capacity_mode = capacity_mode + self.metric = metric for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map diff --git a/pinecone/core/client/model/index_meta_status.py b/pinecone/core/client/model/index_meta_status.py index c9da0214..22a15c96 100644 --- a/pinecone/core/client/model/index_meta_status.py +++ b/pinecone/core/client/model/index_meta_status.py @@ -1,5 +1,5 @@ """ - Pinecone JSON API + Pinecone API No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 @@ -54,7 +54,18 @@ class IndexMetaStatus(ModelNormal): as additional properties values. """ - allowed_values = {} + allowed_values = { + ("state",): { + "INITIALIZING": "Initializing", + "INITIALIZATIONFAILED": "InitializationFailed", + "SCALINGUP": "ScalingUp", + "SCALINGDOWN": "ScalingDown", + "SCALINGUPPODSIZE": "ScalingUpPodSize", + "SCALINGDOWNPODSIZE": "ScalingDownPodSize", + "TERMINATING": "Terminating", + "READY": "Ready", + }, + } validations = {} @@ -89,10 +100,10 @@ def openapi_types(): and the value is attribute type. 
""" return { - "crashed": ([str],), # noqa: E501 "ready": (bool,), # noqa: E501 + "state": (str,), # noqa: E501 + "host": (str,), # noqa: E501 "port": (int,), # noqa: E501 - "waiting": ([str],), # noqa: E501 } @cached_property @@ -100,10 +111,10 @@ def discriminator(): return None attribute_map = { - "crashed": "crashed", # noqa: E501 "ready": "ready", # noqa: E501 + "state": "state", # noqa: E501 + "host": "host", # noqa: E501 "port": "port", # noqa: E501 - "waiting": "waiting", # noqa: E501 } read_only_vars = {} @@ -112,9 +123,15 @@ def discriminator(): @classmethod @convert_js_args_to_python_args - def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 + def _from_openapi_data(cls, ready, state, host, port, *args, **kwargs): # noqa: E501 """IndexMetaStatus - a model defined in OpenAPI + Args: + ready (bool): + state (str): + host (str): + port (int): + Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be @@ -146,10 +163,6 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - crashed ([str]): [optional] # noqa: E501 - ready (bool): [optional] # noqa: E501 - port (int): [optional] # noqa: E501 - waiting ([str]): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -178,6 +191,10 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.ready = ready + self.state = state + self.host = host + self.port = port for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map @@ -202,9 +219,15 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 ) @convert_js_args_to_python_args - def __init__(self, *args, **kwargs): # noqa: E501 + def __init__(self, ready, state, host, port, *args, **kwargs): # noqa: E501 """IndexMetaStatus - a model defined in OpenAPI + Args: + ready (bool): + state (str): + host (str): + port (int): + Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be @@ -236,10 +259,6 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - crashed ([str]): [optional] # noqa: E501 - ready (bool): [optional] # noqa: E501 - port (int): [optional] # noqa: E501 - waiting ([str]): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -266,6 +285,10 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + self.ready = ready + self.state = state + self.host = host + self.port = port for var_name, var_value in kwargs.items(): if ( var_name not in self.attribute_map diff --git a/pinecone/core/client/model/index_meta_database_status.py b/pinecone/core/client/model/inline_response200.py similarity index 93% rename from pinecone/core/client/model/index_meta_database_status.py rename to pinecone/core/client/model/inline_response200.py index 8430174f..57db43a6 100644 --- a/pinecone/core/client/model/index_meta_database_status.py +++ b/pinecone/core/client/model/inline_response200.py @@ -30,7 +30,13 @@ from pinecone.core.client.exceptions import ApiAttributeError -class 
IndexMetaDatabaseStatus(ModelNormal): +def lazy_import(): + from pinecone.core.client.model.index_meta import IndexMeta + + globals()["IndexMeta"] = IndexMeta + + +class InlineResponse200(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -54,15 +60,7 @@ class IndexMetaDatabaseStatus(ModelNormal): as additional properties values. """ - allowed_values = { - ("state",): { - "INITIALIZING": "Initializing", - "SCALINGUP": "ScalingUp", - "SCALINGDOWN": "ScalingDown", - "TERMINATING": "Terminating", - "READY": "Ready", - }, - } + allowed_values = {} validations = {} @@ -72,6 +70,7 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ + lazy_import() return ( bool, date, @@ -96,9 +95,9 @@ def openapi_types(): openapi_types (dict): The key is attribute name and the value is attribute type. """ + lazy_import() return { - "ready": (bool,), # noqa: E501 - "state": (str,), # noqa: E501 + "databases": ([IndexMeta],), # noqa: E501 } @cached_property @@ -106,8 +105,7 @@ def discriminator(): return None attribute_map = { - "ready": "ready", # noqa: E501 - "state": "state", # noqa: E501 + "databases": "databases", # noqa: E501 } read_only_vars = {} @@ -117,7 +115,7 @@ def discriminator(): @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 - """IndexMetaDatabaseStatus - a model defined in OpenAPI + """InlineResponse200 - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -150,8 +148,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - ready (bool): [optional] # noqa: E501 - state (str): [optional] # noqa: E501 + databases ([IndexMeta]): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) @@ -205,7 +202,7 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 - """IndexMetaDatabaseStatus - a model defined in OpenAPI + """InlineResponse200 - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -238,8 +235,7 @@ def __init__(self, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - ready (bool): [optional] # noqa: E501 - state (str): [optional] # noqa: E501 + databases ([IndexMeta]): [optional] # noqa: E501 """ _check_type = kwargs.pop("_check_type", True) diff --git a/pinecone/core/client/model/query_request.py b/pinecone/core/client/model/query_request.py index ec88c383..f9f55c20 100644 --- a/pinecone/core/client/model/query_request.py +++ b/pinecone/core/client/model/query_request.py @@ -283,7 +283,7 @@ def __init__(self, top_k, *args, **kwargs): # noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids.. [optional] if omitted the server will use the default value of False # noqa: E501 queries ([QueryVector]): DEPRECATED. The query vectors. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`.. [optional] # noqa: E501 vector ([float]): The query vector. This should be the same length as the dimension of the index being queried. 
Each `query()` request can contain only one of the parameters `id` or `vector`.. [optional] # noqa: E501 - sparse_vector (SparseValues): The sparse values of the query vector [optional] # noqa: E501 + sparse_vector (SparseValues): [optional] # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`.. [optional] # noqa: E501 """ diff --git a/pinecone/core/client/model/query_vector.py b/pinecone/core/client/model/query_vector.py index 5d52522e..045eee45 100644 --- a/pinecone/core/client/model/query_vector.py +++ b/pinecone/core/client/model/query_vector.py @@ -165,7 +165,7 @@ def _from_openapi_data(cls, values, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - sparse_values (SparseValues): The sparse data of the query vector [optional] # noqa: E501 + sparse_values (SparseValues): [optional] # noqa: E501 top_k (int): An override for the number of results to return for this query vector.. [optional] # noqa: E501 namespace (str): An override the namespace to search.. [optional] # noqa: E501 filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter.. [optional] # noqa: E501 @@ -259,7 +259,7 @@ def __init__(self, values, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - sparse_values (SparseValues): This is the sparse data of the vector [optional] # noqa: E501 + sparse_values (SparseValues): [optional] # noqa: E501 top_k (int): An override for the number of results to return for this query vector.. [optional] # noqa: E501 namespace (str): An override the namespace to search.. [optional] # noqa: E501 filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter.. [optional] # noqa: E501 diff --git a/pinecone/core/client/model/scored_vector.py b/pinecone/core/client/model/scored_vector.py index 98e7f958..30e5fcf1 100644 --- a/pinecone/core/client/model/scored_vector.py +++ b/pinecone/core/client/model/scored_vector.py @@ -166,7 +166,7 @@ def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) score (float): This is a measure of similarity between this vector and the query vector. The higher the score, the more they are similar.. [optional] # noqa: E501 values ([float]): This is the vector data, if it is requested.. [optional] # noqa: E501 - sparse_values (SparseValues): the sparse data of the vector [optional] # noqa: E501 + sparse_values (SparseValues): [optional] # noqa: E501 metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested.. [optional] # noqa: E501 """ @@ -260,7 +260,7 @@ def __init__(self, id, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) score (float): This is a measure of similarity between this vector and the query vector. The higher the score, the more they are similar.. [optional] # noqa: E501 values ([float]): This is the vector data, if it is requested.. 
[optional] # noqa: E501 - sparse_values (SparseValues): This is the sparse data of the vector [optional] # noqa: E501 + sparse_values (SparseValues): [optional] # noqa: E501 metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested.. [optional] # noqa: E501 """ diff --git a/pinecone/core/client/model/sparse_values.py b/pinecone/core/client/model/sparse_values.py index 08bc7133..42a642df 100644 --- a/pinecone/core/client/model/sparse_values.py +++ b/pinecone/core/client/model/sparse_values.py @@ -115,8 +115,8 @@ def _from_openapi_data(cls, indices, values, *args, **kwargs): # noqa: E501 """SparseValues - a model defined in OpenAPI Args: - indices ([int]): - values ([float]): + indices ([int]): The indices of the sparse data. + values ([float]): The corresponding values of the sparse data, which must have the same length as the indices. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -207,8 +207,8 @@ def __init__(self, indices, values, *args, **kwargs): # noqa: E501 """SparseValues - a model defined in OpenAPI Args: - indices ([int]): - values ([float]): + indices ([int]): The indices of the sparse data. + values ([float]): The corresponding values of the sparse data, which must have the same length as the indices. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/pinecone/core/client/model/update_request.py b/pinecone/core/client/model/update_request.py index 404ec38d..71ca4dab 100644 --- a/pinecone/core/client/model/update_request.py +++ b/pinecone/core/client/model/update_request.py @@ -166,7 +166,7 @@ def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) values ([float]): Vector data.. [optional] # noqa: E501 - sparse_values (SparseValues): This is the sparse data of the vector to update [optional] # noqa: E501 + sparse_values (SparseValues): [optional] # noqa: E501 set_metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): Metadata to *set* for the vector.. [optional] # noqa: E501 namespace (str): Namespace name where to update the vector.. [optional] # noqa: E501 """ @@ -260,7 +260,7 @@ def __init__(self, id, *args, **kwargs): # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) values ([float]): Vector data.. [optional] # noqa: E501 - sparse_values (SparseValues): This is the sparse data of the vector to update [optional] # noqa: E501 + sparse_values (SparseValues): [optional] # noqa: E501 set_metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): Metadata to *set* for the vector.. [optional] # noqa: E501 namespace (str): Namespace name where to update the vector..
[optional] # noqa: E501 """ diff --git a/pinecone/core/client/model/vector.py b/pinecone/core/client/model/vector.py index 58a4dbea..ec5bf635 100644 --- a/pinecone/core/client/model/vector.py +++ b/pinecone/core/client/model/vector.py @@ -164,7 +164,7 @@ def _from_openapi_data(cls, id, values, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - sparse_values (SparseValues): the sparse data of the returned vector [optional] # noqa: E501 + sparse_values (SparseValues): [optional] # noqa: E501 metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata included in the request.. [optional] # noqa: E501 """ @@ -258,7 +258,7 @@ def __init__(self, id, values, *args, **kwargs): # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - sparse_values (SparseValues): This is the sparse data of the vector to update [optional] # noqa: E501 + sparse_values (SparseValues): [optional] # noqa: E501 metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata included in the request.. [optional] # noqa: E501 """ diff --git a/pinecone/core/client/models/__init__.py b/pinecone/core/client/models/__init__.py index 23c7ef03..ca10019c 100644 --- a/pinecone/core/client/models/__init__.py +++ b/pinecone/core/client/models/__init__.py @@ -20,7 +20,8 @@ from pinecone.core.client.model.hnsw_config import HnswConfig from pinecone.core.client.model.index_meta import IndexMeta from pinecone.core.client.model.index_meta_database import IndexMetaDatabase -from pinecone.core.client.model.index_meta_database_status import IndexMetaDatabaseStatus +from pinecone.core.client.model.index_meta_status import IndexMetaStatus +from pinecone.core.client.model.inline_response200 import InlineResponse200 from pinecone.core.client.model.namespace_summary import NamespaceSummary from pinecone.core.client.model.patch_request import PatchRequest from pinecone.core.client.model.protobuf_any import ProtobufAny diff --git a/pinecone/core/client/rest.py b/pinecone/core/client/rest.py index b986712b..cb7376a3 100644 --- a/pinecone/core/client/rest.py +++ b/pinecone/core/client/rest.py @@ -40,11 +40,11 @@ def __init__(self, resp): def getheaders(self): """Returns a dictionary of the response headers.""" - return self.urllib3_response.headers + return self.urllib3_response.getheaders() def getheader(self, name, default=None): """Returns a given response header.""" - return self.urllib3_response.headers.get(name, default) + return self.urllib3_response.getheader(name, default) class RESTClientObject(object): @@ -88,7 +88,7 @@ def __init__(self, configuration, pools_size=4, maxsize=None): key_file=configuration.key_file, proxy_url=configuration.proxy, proxy_headers=configuration.proxy_headers, - **addition_pool_args + **addition_pool_args, ) else: self.pool_manager = urllib3.PoolManager( @@ -98,7 +98,7 @@ def __init__(self, configuration, pools_size=4, maxsize=None): ca_certs=configuration.ssl_ca_cert, cert_file=configuration.cert_file, key_file=configuration.key_file, - **addition_pool_args + **addition_pool_args, ) def request( diff --git a/pinecone/core/exceptions.py b/pinecone/core/exceptions.py deleted file mode 100644 index cd457772..00000000 --- a/pinecone/core/exceptions.py +++ /dev/null @@ -1,6 +0,0 @@ -class PineconeException(Exception): - """The base exception 
class for all Pinecone client exceptions.""" - - -class PineconeProtocolError(PineconeException): - """Raised when something unexpected happens mid-request/response.""" diff --git a/pinecone/core/grpc/__init__.py b/pinecone/core/grpc/__init__.py deleted file mode 100644 index 8b137891..00000000 --- a/pinecone/core/grpc/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/pinecone/core/grpc/index_grpc.py b/pinecone/core/grpc/index_grpc.py deleted file mode 100644 index f62e9ec8..00000000 --- a/pinecone/core/grpc/index_grpc.py +++ /dev/null @@ -1,843 +0,0 @@ -import logging -import numbers -from abc import ABC, abstractmethod -from functools import wraps -from importlib.util import find_spec -from typing import NamedTuple, Optional, Dict, Iterable, Union, List, Tuple, Any -from collections.abc import Mapping - -import certifi -import grpc -from google.protobuf import json_format -from grpc._channel import _InactiveRpcError, _MultiThreadedRendezvous -from tqdm.autonotebook import tqdm -import json - -from pinecone import FetchResponse, QueryResponse, ScoredVector, SingleQueryResults, DescribeIndexStatsResponse -from pinecone.config import Config -from pinecone.core.client.model.namespace_summary import NamespaceSummary -from pinecone.core.client.model.vector import Vector as _Vector -from pinecone.core.grpc.protos.vector_service_pb2 import ( - Vector as GRPCVector, - QueryVector as GRPCQueryVector, - UpsertRequest, - UpsertResponse, - DeleteRequest, - QueryRequest, - FetchRequest, - UpdateRequest, - DescribeIndexStatsRequest, - DeleteResponse, - UpdateResponse, - SparseValues as GRPCSparseValues, -) -from pinecone.core.client.model.sparse_values import SparseValues -from pinecone.core.grpc.protos.vector_service_pb2_grpc import VectorServiceStub -from pinecone.core.grpc.retry import RetryOnRpcErrorClientInterceptor, RetryConfig -from pinecone.core.utils import _generate_request_id, dict_to_proto_struct, fix_tuple_length -from pinecone.core.utils.constants import ( - MAX_MSG_SIZE, - REQUEST_ID, - CLIENT_VERSION, - REQUIRED_VECTOR_FIELDS, - OPTIONAL_VECTOR_FIELDS, -) -from pinecone.exceptions import PineconeException - -__all__ = ["GRPCIndex", "GRPCVector", "GRPCQueryVector", "GRPCSparseValues"] - -_logger = logging.getLogger(__name__) - - -class GRPCClientConfig(NamedTuple): - """ - GRPC client configuration options. - - :param secure: Whether to use encrypted protocol (SSL). Defaults to True. - :type secure: bool, optional - :param timeout: defaults to 20 seconds. Fail if gateway doesn't receive response within timeout. - :type timeout: int, optional - :param conn_timeout: defaults to 1. Timeout to retry connection if gRPC is unavailable. 0 is no retry.
- :type conn_timeout: int, optional - :param reuse_channel: Whether to reuse the same grpc channel for multiple requests - :type reuse_channel: bool, optional - :param retry_config: RetryConfig indicating how requests should be retried - :type retry_config: RetryConfig, optional - :param grpc_channel_options: A dict of gRPC channel arguments - :type grpc_channel_options: Dict[str, str] - """ - - secure: bool = True - timeout: int = 20 - conn_timeout: int = 1 - reuse_channel: bool = True - retry_config: Optional[RetryConfig] = None - grpc_channel_options: Dict[str, str] = None - - @classmethod - def _from_dict(cls, kwargs: dict): - cls_kwargs = {kk: vv for kk, vv in kwargs.items() if kk in cls._fields} - return cls(**cls_kwargs) - - -class GRPCIndexBase(ABC): - """ - Base class for grpc-based interaction with Pinecone indexes - """ - - _pool = None - - def __init__( - self, index_name: str, channel=None, grpc_config: GRPCClientConfig = None, _endpoint_override: str = None - ): - self.name = index_name - - self.grpc_client_config = grpc_config or GRPCClientConfig() - self.retry_config = self.grpc_client_config.retry_config or RetryConfig() - self.fixed_metadata = {"api-key": Config.API_KEY, "service-name": index_name, "client-version": CLIENT_VERSION} - self._endpoint_override = _endpoint_override - - self.method_config = json.dumps( - { - "methodConfig": [ - { - "name": [{"service": "VectorService.Upsert"}], - "retryPolicy": { - "maxAttempts": 5, - "initialBackoff": "0.1s", - "maxBackoff": "1s", - "backoffMultiplier": 2, - "retryableStatusCodes": ["UNAVAILABLE"], - }, - }, - { - "name": [{"service": "VectorService"}], - "retryPolicy": { - "maxAttempts": 5, - "initialBackoff": "0.1s", - "maxBackoff": "1s", - "backoffMultiplier": 2, - "retryableStatusCodes": ["UNAVAILABLE"], - }, - }, - ] - } - ) - - self._channel = channel or self._gen_channel() - self.stub = self.stub_class(self._channel) - - @property - @abstractmethod - def stub_class(self): - pass - - def _endpoint(self): - return ( - self._endpoint_override - if self._endpoint_override - else f"{self.name}-{Config.PROJECT_NAME}.svc.{Config.ENVIRONMENT}.pinecone.io:443" - ) - - def _gen_channel(self, options=None): - target = self._endpoint() - default_options = { - "grpc.max_send_message_length": MAX_MSG_SIZE, - "grpc.max_receive_message_length": MAX_MSG_SIZE, - "grpc.service_config": self.method_config, - "grpc.enable_retries": True, - } - if self.grpc_client_config.secure: - default_options["grpc.ssl_target_name_override"] = target.split(":")[0] - user_provided_options = options or {} - _options = tuple((k, v) for k, v in {**default_options, **user_provided_options}.items()) - _logger.debug( - "creating new channel with endpoint %s options %s and config %s", target, _options, self.grpc_client_config - ) - if not self.grpc_client_config.secure: - channel = grpc.insecure_channel(target, options=_options) - else: - root_cas = open(certifi.where(), "rb").read() - tls = grpc.ssl_channel_credentials(root_certificates=root_cas) - channel = grpc.secure_channel(target, tls, options=_options) - - return channel - - @property - def channel(self): - """Creates GRPC channel.""" - if self.grpc_client_config.reuse_channel and self._channel and self.grpc_server_on(): - return self._channel - self._channel = self._gen_channel() - return self._channel - - def grpc_server_on(self) -> bool: - try: - grpc.channel_ready_future(self._channel).result(timeout=self.grpc_client_config.conn_timeout) - return True - except grpc.FutureTimeoutError: - return False 
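# ---- Editor's note: illustrative usage sketch; not part of the deleted file. ----
# The GRPCClientConfig options documented above feed the channel logic in
# GRPCIndexBase (_gen_channel, the channel property, grpc_server_on). Assuming
# the pre-removal public API and an already-initialised pinecone configuration
# (Config.API_KEY etc.), connection behaviour could be tuned like this; the
# index name "my-index" is hypothetical:
from pinecone.core.grpc.index_grpc import GRPCIndex, GRPCClientConfig

config = GRPCClientConfig(
    secure=True,         # TLS channel built from certifi root certificates
    timeout=20,          # fail a request if the gateway does not answer in time
    conn_timeout=5,      # how long grpc_server_on() waits for channel readiness
    reuse_channel=True,  # reuse the existing channel while it is still live
)
index = GRPCIndex("my-index", grpc_config=config)
# ---- End editor's note. ----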
- - def close(self): - """Closes the connection to the index.""" - try: - self._channel.close() - except TypeError: - pass - - def _wrap_grpc_call( - self, func, request, timeout=None, metadata=None, credentials=None, wait_for_ready=None, compression=None - ): - @wraps(func) - def wrapped(): - user_provided_metadata = metadata or {} - _metadata = tuple( - (k, v) for k, v in {**self.fixed_metadata, **self._request_metadata(), **user_provided_metadata}.items() - ) - try: - return func( - request, - timeout=timeout, - metadata=_metadata, - credentials=credentials, - wait_for_ready=wait_for_ready, - compression=compression, - ) - except _InactiveRpcError as e: - raise PineconeException(e._state.debug_error_string) from e - - return wrapped() - - def _request_metadata(self) -> Dict[str, str]: - return {REQUEST_ID: _generate_request_id()} - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_value, traceback): - self.close() - - -def parse_sparse_values(sparse_values: dict): - return ( - SparseValues(indices=sparse_values["indices"], values=sparse_values["values"]) - if sparse_values - else SparseValues(indices=[], values=[]) - ) - - -def parse_fetch_response(response: dict): - vd = {} - vectors = response.get("vectors") - if not vectors: - return None - for id, vec in vectors.items(): - v_obj = _Vector( - id=vec["id"], - values=vec["values"], - sparse_values=parse_sparse_values(vec.get("sparseValues")), - metadata=vec.get("metadata", None), - _check_type=False, - ) - vd[id] = v_obj - namespace = response.get("namespace", "") - return FetchResponse(vectors=vd, namespace=namespace, _check_type=False) - - -def parse_query_response(response: dict, unary_query: bool, _check_type: bool = False): - res = [] - - # TODO: consider deleting this deprecated case - for match in response.get("results", []): - namespace = match.get("namespace", "") - m = [] - if "matches" in match: - for item in match["matches"]: - sc = ScoredVector( - id=item["id"], - score=item.get("score", 0.0), - values=item.get("values", []), - sparse_values=parse_sparse_values(item.get("sparseValues")), - metadata=item.get("metadata", {}), - ) - m.append(sc) - res.append(SingleQueryResults(matches=m, namespace=namespace)) - - m = [] - for item in response.get("matches", []): - sc = ScoredVector( - id=item["id"], - score=item.get("score", 0.0), - values=item.get("values", []), - sparse_values=parse_sparse_values(item.get("sparseValues")), - metadata=item.get("metadata", {}), - _check_type=_check_type, - ) - m.append(sc) - - kwargs = {"_check_type": _check_type} - if unary_query: - kwargs["namespace"] = response.get("namespace", "") - kwargs["matches"] = m - else: - kwargs["results"] = res - return QueryResponse(**kwargs) - - -def parse_stats_response(response: dict): - fullness = response.get("indexFullness", 0.0) - total_vector_count = response.get("totalVectorCount", 0) - dimension = response.get("dimension", 0) - summaries = response.get("namespaces", {}) - namespace_summaries = {} - for key in summaries: - vc = summaries[key].get("vectorCount", 0) - namespace_summaries[key] = NamespaceSummary(vector_count=vc) - return DescribeIndexStatsResponse( - namespaces=namespace_summaries, - dimension=dimension, - index_fullness=fullness, - total_vector_count=total_vector_count, - _check_type=False, - ) - - -class PineconeGrpcFuture: - def __init__(self, delegate): - self._delegate = delegate - - def cancel(self): - return self._delegate.cancel() - - def cancelled(self): - return self._delegate.cancelled() - - def 
running(self): - return self._delegate.running() - - def done(self): - return self._delegate.done() - - def add_done_callback(self, fun): - return self._delegate.add_done_callback(fun) - - def result(self, timeout=None): - try: - return self._delegate.result(timeout=timeout) - except _MultiThreadedRendezvous as e: - raise PineconeException(e._state.debug_error_string) from e - - def exception(self, timeout=None): - return self._delegate.exception(timeout=timeout) - - def traceback(self, timeout=None): - return self._delegate.traceback(timeout=timeout) - - -class GRPCIndex(GRPCIndexBase): - - """A client for interacting with a Pinecone index via GRPC API.""" - - @property - def stub_class(self): - return VectorServiceStub - - def upsert( - self, - vectors: Union[List[GRPCVector], List[tuple], List[dict]], - async_req: bool = False, - namespace: Optional[str] = None, - batch_size: Optional[int] = None, - show_progress: bool = True, - **kwargs - ) -> Union[UpsertResponse, PineconeGrpcFuture]: - """ - The upsert operation writes vectors into a namespace. - If a new value is upserted for an existing vector id, it will overwrite the previous value. - - Examples: - >>> index.upsert([('id1', [1.0, 2.0, 3.0], {'key': 'value'}), - ('id2', [1.0, 2.0, 3.0]) - ], - namespace='ns1', async_req=True) - >>> index.upsert([{'id': 'id1', 'values': [1.0, 2.0, 3.0], 'metadata': {'key': 'value'}}, - {'id': 'id2', - 'values': [1.0, 2.0, 3.0], - 'sparse_values': {'indices': [1, 8], 'values': [0.2, 0.4]}}, - ]) - >>> index.upsert([GRPCVector(id='id1', values=[1.0, 2.0, 3.0], metadata={'key': 'value'}), - GRPCVector(id='id2', values=[1.0, 2.0, 3.0]), - GRPCVector(id='id3', - values=[1.0, 2.0, 3.0], - sparse_values=GRPCSparseValues(indices=[1, 2], values=[0.2, 0.4]))]) - - Args: - vectors (Union[List[Vector], List[Tuple]]): A list of vectors to upsert. - - A vector can be represented by 1) a tuple, 2) a GRPCVector object or 3) a dictionary - 1) if a tuple is used, it must be of the form (id, values, metadata) or (id, values), - where id is a string, values is a list of floats, and metadata is a dict. - Examples: ('id1', [1.0, 2.0, 3.0], {'key': 'value'}), ('id2', [1.0, 2.0, 3.0]) - - 2) if a GRPCVector object is used, a GRPCVector object must be of the form - GRPCVector(id, values, metadata), where metadata is an optional argument of type - Dict[str, Union[str, float, int, bool, List[int], List[float], List[str]]] - Examples: GRPCVector(id='id1', values=[1.0, 2.0, 3.0], metadata={'key': 'value'}), - GRPCVector(id='id2', values=[1.0, 2.0, 3.0]), - GRPCVector(id='id3', - values=[1.0, 2.0, 3.0], - sparse_values=GRPCSparseValues(indices=[1, 2], values=[0.2, 0.4])) - - 3) if a dictionary is used, it must be in the form - {'id': str, 'values': List[float], 'sparse_values': {'indices': List[int], 'values': List[float]}, - 'metadata': dict} - - Note: the dimension of each vector must match the dimension of the index. - async_req (bool): If True, the upsert operation will be performed asynchronously. - Cannot be used with batch_size. - Defaults to False. See: https://docs.pinecone.io/docs/performance-tuning [optional] - namespace (str): The namespace to write to. If not specified, the default namespace is used. [optional] - batch_size (int): The number of vectors to upsert in each batch. - Cannot be used with async_req=True. - If not specified, all vectors will be upserted in a single batch. [optional] - show_progress (bool): Whether to show a progress bar using tqdm. - Applied only if batch_size is provided. Default is True.
- - Returns: UpsertResponse, contains the number of vectors upserted - """ - if async_req and batch_size is not None: - raise ValueError( - "async_req is not supported when batch_size is provided. " - "To upsert in parallel, please follow: " - "https://docs.pinecone.io/docs/performance-tuning" - ) - - def _dict_to_grpc_vector(item): - item_keys = set(item.keys()) - if not item_keys.issuperset(REQUIRED_VECTOR_FIELDS): - raise ValueError( - f"Vector dictionary is missing required fields: {list(REQUIRED_VECTOR_FIELDS - item_keys)}" - ) - - excessive_keys = item_keys - (REQUIRED_VECTOR_FIELDS | OPTIONAL_VECTOR_FIELDS) - if len(excessive_keys) > 0: - raise ValueError( - f"Found excess keys in the vector dictionary: {list(excessive_keys)}. " - f"The allowed keys are: {list(REQUIRED_VECTOR_FIELDS | OPTIONAL_VECTOR_FIELDS)}" - ) - - sparse_values = None - if "sparse_values" in item: - if not isinstance(item["sparse_values"], Mapping): - raise TypeError( - f"Column `sparse_values` is expected to be a dictionary, found {type(item['sparse_values'])}" - ) - indices = item["sparse_values"].get("indices", None) - values = item["sparse_values"].get("values", None) - try: - sparse_values = GRPCSparseValues(indices=indices, values=values) - except TypeError as e: - raise TypeError( - "Found unexpected data in column `sparse_values`. " - "Expected format is `'sparse_values': {'indices': List[int], 'values': List[float]}`." - ) from e - - metadata = item.get("metadata", None) - if metadata is not None and not isinstance(metadata, Mapping): - raise TypeError(f"Column `metadata` is expected to be a dictionary, found {type(metadata)}") - - try: - return GRPCVector( - id=item["id"], - values=item["values"], - sparse_values=sparse_values, - metadata=dict_to_proto_struct(metadata), - ) - - except TypeError as e: - # No need to raise a dedicated error for `id` - protobuf's error message is clear enough - if not isinstance(item["values"], Iterable) or not isinstance(item["values"][0], numbers.Real): - raise TypeError("Column `values` is expected to be a list of floats") - raise - - def _vector_transform(item): - if isinstance(item, GRPCVector): - return item - elif isinstance(item, tuple): - if len(item) > 3: - raise ValueError( - f"Found a tuple of length {len(item)} which is not supported. " - f"Vectors can be represented as tuples either of the form (id, values, metadata) or (id, values). " - f"To pass sparse values please use either dicts or GRPCVector objects as inputs."
- ) - id, values, metadata = fix_tuple_length(item, 3) - return GRPCVector(id=id, values=values, metadata=dict_to_proto_struct(metadata) or {}) - elif isinstance(item, Mapping): - return _dict_to_grpc_vector(item) - raise ValueError(f"Invalid vector value passed: cannot interpret type {type(item)}") - - timeout = kwargs.pop("timeout", None) - - vectors = list(map(_vector_transform, vectors)) - if async_req: - args_dict = self._parse_non_empty_args([("namespace", namespace)]) - request = UpsertRequest(vectors=vectors, **args_dict, **kwargs) - future = self._wrap_grpc_call(self.stub.Upsert.future, request, timeout=timeout) - return PineconeGrpcFuture(future) - - if batch_size is None: - return self._upsert_batch(vectors, namespace, timeout=timeout, **kwargs) - - if not isinstance(batch_size, int) or batch_size <= 0: - raise ValueError("batch_size must be a positive integer") - - pbar = tqdm(total=len(vectors), disable=not show_progress, desc="Upserted vectors") - total_upserted = 0 - for i in range(0, len(vectors), batch_size): - batch_result = self._upsert_batch(vectors[i : i + batch_size], namespace, timeout=timeout, **kwargs) - pbar.update(batch_result.upserted_count) - # we can't use pbar.n here, to handle the case show_progress=False - total_upserted += batch_result.upserted_count - - return UpsertResponse(upserted_count=total_upserted) - - def _upsert_batch( - self, vectors: List[GRPCVector], namespace: Optional[str], timeout: Optional[float], **kwargs - ) -> UpsertResponse: - args_dict = self._parse_non_empty_args([("namespace", namespace)]) - request = UpsertRequest(vectors=vectors, **args_dict) - return self._wrap_grpc_call(self.stub.Upsert, request, timeout=timeout, **kwargs) - - def upsert_from_dataframe( - self, - df, - namespace: str = None, - batch_size: int = 500, - use_async_requests: bool = True, - show_progress: bool = True, - ) -> UpsertResponse: - """Upserts a dataframe into the index. - - Args: - df: A pandas dataframe with the following columns: id, values, and metadata. - namespace: The namespace to upsert into. - batch_size: The number of rows to upsert in a single batch. - use_async_requests: Whether to send multiple upsert requests at the same time using the asynchronous request mechanism. - Set to `False` to send requests one at a time. - show_progress: Whether to show a progress bar. - """ - try: - import pandas as pd - except ImportError: - raise RuntimeError( - "The `pandas` package is not installed. Please install pandas to use `upsert_from_dataframe()`" - ) - - if not isinstance(df, pd.DataFrame): - raise ValueError(f"Only pandas dataframes are supported.
Found: {type(df)}") - - pbar = tqdm(total=len(df), disable=not show_progress, desc="sending upsert requests") - results = [] - for chunk in self._iter_dataframe(df, batch_size=batch_size): - res = self.upsert(vectors=chunk, namespace=namespace, async_req=use_async_requests) - pbar.update(len(chunk)) - results.append(res) - - if use_async_requests: - results = [async_result.result() for async_result in tqdm(results, desc="collecting async responses")] - - upserted_count = 0 - for res in results: - upserted_count += res.upserted_count - - return UpsertResponse(upserted_count=upserted_count) - - @staticmethod - def _iter_dataframe(df, batch_size): - for i in range(0, len(df), batch_size): - batch = df.iloc[i : i + batch_size].to_dict(orient="records") - yield batch - - def delete( - self, - ids: Optional[List[str]] = None, - delete_all: Optional[bool] = None, - namespace: Optional[str] = None, - filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, - async_req: bool = False, - **kwargs - ) -> Union[DeleteResponse, PineconeGrpcFuture]: - """ - The Delete operation deletes vectors from the index, from a single namespace. - No error raised if the vector id does not exist. - Note: for any delete call, if namespace is not specified, the default namespace is used. - - Delete can occur in the following mutual exclusive ways: - 1. Delete by ids from a single namespace - 2. Delete all vectors from a single namespace by setting delete_all to True - 3. Delete all vectors from a single namespace by specifying a metadata filter - (note that for this option delete all must be set to False) - - Examples: - >>> index.delete(ids=['id1', 'id2'], namespace='my_namespace') - >>> index.delete(delete_all=True, namespace='my_namespace') - >>> index.delete(filter={'key': 'value'}, namespace='my_namespace', async_req=True) - - Args: - ids (List[str]): Vector ids to delete [optional] - delete_all (bool): This indicates that all vectors in the index namespace should be deleted.. [optional] - Default is False. - namespace (str): The namespace to delete vectors from [optional] - If not specified, the default namespace is used. - filter (Dict[str, Union[str, float, int, bool, List, dict]]): - If specified, the metadata filter here will be used to select the vectors to delete. - This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] - async_req (bool): If True, the delete operation will be performed asynchronously. - Defaults to False. [optional] - - Returns: DeleteResponse (contains no data) or a PineconeGrpcFuture object if async_req is True. - """ - - if filter is not None: - filter = dict_to_proto_struct(filter) - - args_dict = self._parse_non_empty_args( - [("ids", ids), ("delete_all", delete_all), ("namespace", namespace), ("filter", filter)] - ) - timeout = kwargs.pop("timeout", None) - - request = DeleteRequest(**args_dict, **kwargs) - if async_req: - future = self._wrap_grpc_call(self.stub.Delete.future, request, timeout=timeout) - return PineconeGrpcFuture(future) - else: - return self._wrap_grpc_call(self.stub.Delete, request, timeout=timeout) - - def fetch(self, ids: Optional[List[str]], namespace: Optional[str] = None, **kwargs) -> FetchResponse: - """ - The fetch operation looks up and returns vectors, by ID, from a single namespace. - The returned vectors include the vector data and/or metadata. 
- - Examples: - >>> index.fetch(ids=['id1', 'id2'], namespace='my_namespace') - >>> index.fetch(ids=['id1', 'id2']) - - Args: - ids (List[str]): The vector IDs to fetch. - namespace (str): The namespace to fetch vectors from. - If not specified, the default namespace is used. [optional] - - Returns: FetchResponse object which contains the list of Vector objects, and namespace name. - """ - timeout = kwargs.pop("timeout", None) - - args_dict = self._parse_non_empty_args([("namespace", namespace)]) - - request = FetchRequest(ids=ids, **args_dict, **kwargs) - response = self._wrap_grpc_call(self.stub.Fetch, request, timeout=timeout) - json_response = json_format.MessageToDict(response) - return parse_fetch_response(json_response) - - def query( - self, - vector: Optional[List[float]] = None, - id: Optional[str] = None, - queries: Optional[Union[List[GRPCQueryVector], List[Tuple]]] = None, - namespace: Optional[str] = None, - top_k: Optional[int] = None, - filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, - include_values: Optional[bool] = None, - include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[GRPCSparseValues, Dict[str, Union[List[float], List[int]]]]] = None, - **kwargs - ) -> QueryResponse: - """ - The Query operation searches a namespace, using a query vector. - It retrieves the ids of the most similar items in a namespace, along with their similarity scores. - - Examples: - >>> index.query(vector=[1, 2, 3], top_k=10, namespace='my_namespace') - >>> index.query(id='id1', top_k=10, namespace='my_namespace') - >>> index.query(vector=[1, 2, 3], top_k=10, namespace='my_namespace', filter={'key': 'value'}) - >>> index.query(id='id1', top_k=10, namespace='my_namespace', include_metadata=True, include_values=True) - >>> index.query(vector=[1, 2, 3], sparse_vector={'indices': [1, 2], 'values': [0.2, 0.4]}, - >>> top_k=10, namespace='my_namespace') - >>> index.query(vector=[1, 2, 3], sparse_vector=GRPCSparseValues([1, 2], [0.2, 0.4]), - >>> top_k=10, namespace='my_namespace') - - Args: - vector (List[float]): The query vector. This should be the same length as the dimension of the index - being queried. Each `query()` request can contain only one of the parameters - `queries`, `vector`, or `id`.. [optional] - id (str): The unique ID of the vector to be used as a query vector. - Each `query()` request can contain only one of the parameters - `queries`, `vector`, or `id`.. [optional] - queries ([GRPCQueryVector]): DEPRECATED. The query vectors. - Each `query()` request can contain only one of the parameters - `queries`, `vector`, or `id`.. [optional] - top_k (int): The number of results to return for each query. Must be a positive integer. - namespace (str): The namespace to query vectors from. - If not specified, the default namespace is used. [optional] - filter (Dict[str, Union[str, float, int, bool, List, dict]]): - The filter to apply. You can use vector metadata to limit your search. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] - include_values (bool): Indicates whether vector values are included in the response. - If omitted the server will use the default value of False [optional] - include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. - If omitted the server will use the default value of False [optional] - sparse_vector: (Union[SparseValues, Dict[str, Union[List[float], List[int]]]]): sparse values of the query vector.
- Expected to be either a GRPCSparseValues object or a dict of the form: - {'indices': List[int], 'values': List[float]}, where the lists each have the same length. - - Returns: QueryResponse object which contains the list of the closest vectors as ScoredVector objects, - and namespace name. - """ - - def _query_transform(item): - if isinstance(item, GRPCQueryVector): - return item - if isinstance(item, tuple): - values, filter = fix_tuple_length(item, 2) - filter = dict_to_proto_struct(filter) - return GRPCQueryVector(values=values, filter=filter) - if isinstance(item, Iterable): - return GRPCQueryVector(values=item) - raise ValueError(f"Invalid query vector value passed: cannot interpret type {type(item)}") - - queries = list(map(_query_transform, queries)) if queries is not None else None - - if filter is not None: - filter = dict_to_proto_struct(filter) - - sparse_vector = self._parse_sparse_values_arg(sparse_vector) - args_dict = self._parse_non_empty_args( - [ - ("vector", vector), - ("id", id), - ("queries", queries), - ("namespace", namespace), - ("top_k", top_k), - ("filter", filter), - ("include_values", include_values), - ("include_metadata", include_metadata), - ("sparse_vector", sparse_vector), - ] - ) - - request = QueryRequest(**args_dict) - - timeout = kwargs.pop("timeout", None) - response = self._wrap_grpc_call(self.stub.Query, request, timeout=timeout) - json_response = json_format.MessageToDict(response) - return parse_query_response(json_response, vector is not None or id is not None, _check_type=False) - - def update( - self, - id: str, - async_req: bool = False, - values: Optional[List[float]] = None, - set_metadata: Optional[Dict[str, Union[str, float, int, bool, List[int], List[float], List[str]]]] = None, - namespace: Optional[str] = None, - sparse_values: Optional[Union[GRPCSparseValues, Dict[str, Union[List[float], List[int]]]]] = None, - **kwargs - ) -> Union[UpdateResponse, PineconeGrpcFuture]: - """ - The Update operation updates a vector in a namespace. - If a value is included, it will overwrite the previous value. - If set_metadata is included, - the values of the fields specified in it will be added to or will overwrite the previous metadata. - - Examples: - >>> index.update(id='id1', values=[1, 2, 3], namespace='my_namespace') - >>> index.update(id='id1', set_metadata={'key': 'value'}, namespace='my_namespace', async_req=True) - >>> index.update(id='id1', values=[1, 2, 3], sparse_values={'indices': [1, 2], 'values': [0.2, 0.4]}, - >>> namespace='my_namespace') - >>> index.update(id='id1', values=[1, 2, 3], sparse_values=GRPCSparseValues(indices=[1, 2], values=[0.2, 0.4]), - >>> namespace='my_namespace') - - Args: - id (str): Vector's unique id. - async_req (bool): If True, the update operation will be performed asynchronously. - Defaults to False. [optional] - values (List[float]): vector values to set. [optional] - set_metadata (Dict[str, Union[str, float, int, bool, List[int], List[float], List[str]]]]): - metadata to set for vector. [optional] - namespace (str): Namespace name where to update the vector.. [optional] - sparse_values: (Dict[str, Union[List[float], List[int]]]): sparse values to update for the vector. - Expected to be either a GRPCSparseValues object or a dict of the form: - {'indices': List[int], 'values': List[float]} where the lists each have the same length. - - - Returns: UpdateResponse (contains no data) or a PineconeGrpcFuture object if async_req is True.
- """ - if set_metadata is not None: - set_metadata = dict_to_proto_struct(set_metadata) - timeout = kwargs.pop("timeout", None) - - sparse_values = self._parse_sparse_values_arg(sparse_values) - args_dict = self._parse_non_empty_args( - [ - ("values", values), - ("set_metadata", set_metadata), - ("namespace", namespace), - ("sparse_values", sparse_values), - ] - ) - - request = UpdateRequest(id=id, **args_dict) - if async_req: - future = self._wrap_grpc_call(self.stub.Update.future, request, timeout=timeout) - return PineconeGrpcFuture(future) - else: - return self._wrap_grpc_call(self.stub.Update, request, timeout=timeout) - - def describe_index_stats( - self, filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, **kwargs - ) -> DescribeIndexStatsResponse: - """ - The DescribeIndexStats operation returns statistics about the index's contents. - For example: The vector count per namespace and the number of dimensions. - - Examples: - >>> index.describe_index_stats() - >>> index.describe_index_stats(filter={'key': 'value'}) - - Args: - filter (Dict[str, Union[str, float, int, bool, List, dict]]): - If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. - See https://www.pinecone.io/docs/metadata-filtering/.. [optional] - - Returns: DescribeIndexStatsResponse object which contains stats about the index. - """ - if filter is not None: - filter = dict_to_proto_struct(filter) - args_dict = self._parse_non_empty_args([("filter", filter)]) - timeout = kwargs.pop("timeout", None) - - request = DescribeIndexStatsRequest(**args_dict) - response = self._wrap_grpc_call(self.stub.DescribeIndexStats, request, timeout=timeout) - json_response = json_format.MessageToDict(response) - return parse_stats_response(json_response) - - @staticmethod - def _parse_non_empty_args(args: List[Tuple[str, Any]]) -> Dict[str, Any]: - return {arg_name: val for arg_name, val in args if val is not None} - - @staticmethod - def _parse_sparse_values_arg( - sparse_values: Optional[Union[GRPCSparseValues, Dict[str, Union[List[float], List[int]]]]] - ) -> Optional[GRPCSparseValues]: - if sparse_values is None: - return None - - if isinstance(sparse_values, GRPCSparseValues): - return sparse_values - - if not isinstance(sparse_values, dict) or "indices" not in sparse_values or "values" not in sparse_values: - raise ValueError( - "Invalid sparse values argument. Expected a dict of: {'indices': List[int], 'values': List[float]}." - f"Received: {sparse_values}" - ) - - return GRPCSparseValues(indices=sparse_values["indices"], values=sparse_values["values"]) diff --git a/pinecone/core/grpc/protos/__init__.py b/pinecone/core/grpc/protos/__init__.py deleted file mode 100644 index 8b137891..00000000 --- a/pinecone/core/grpc/protos/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/pinecone/core/grpc/protos/vector_column_service_pb2.py b/pinecone/core/grpc/protos/vector_column_service_pb2.py deleted file mode 100644 index 1b4ebadc..00000000 --- a/pinecone/core/grpc/protos/vector_column_service_pb2.py +++ /dev/null @@ -1,1293 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: vector_column_service.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="vector_column_service.proto", - package="pinecone_columnar", - syntax="proto3", - serialized_options=b"\n\021io.pinecone.protoZ+github.com/pinecone-io/go-pinecone/pinecone", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n\x1bvector_column_service.proto\x12\x11pinecone_columnar\x1a\x1cgoogle/protobuf/struct.proto"K\n\x07NdArray\x12\x0e\n\x06\x62uffer\x18\x01 \x01(\x0c\x12\r\n\x05shape\x18\x02 \x03(\r\x12\r\n\x05\x64type\x18\x03 \x01(\t\x12\x12\n\ncompressed\x18\x04 \x01(\x08"\xbc\x01\n\rScoredResults\x12\'\n\x03ids\x18\x01 \x01(\x0b\x32\x1a.pinecone_columnar.NdArray\x12*\n\x06scores\x18\x02 \x01(\x0b\x32\x1a.pinecone_columnar.NdArray\x12(\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x1a.pinecone_columnar.NdArray\x12,\n\x08metadata\x18\x04 \x01(\x0b\x32\x1a.pinecone_columnar.NdArray"\x84\x01\n\rUpsertRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0b\n\x03ids\x18\x02 \x03(\t\x12(\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x1a.pinecone_columnar.NdArray\x12)\n\x08metadata\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Struct"(\n\x0eUpsertResponse\x12\x16\n\x0eupserted_count\x18\x01 \x01(\r"C\n\rDeleteRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0b\n\x03ids\x18\x02 \x03(\t\x12\x12\n\ndelete_all\x18\x03 \x01(\x08"\x10\n\x0e\x44\x65leteResponse".\n\x0c\x46\x65tchRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0b\n\x03ids\x18\x02 \x03(\t"\x87\x01\n\rFetchResponse\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0b\n\x03ids\x18\x02 \x03(\t\x12+\n\x07vectors\x18\x03 \x03(\x0b\x32\x1a.pinecone_columnar.NdArray\x12)\n\x08metadata\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Struct"\xa1\x02\n\x0cQueryRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x1b\n\x13namespace_overrides\x18\x06 \x03(\t\x12\r\n\x05top_k\x18\x02 \x01(\r\x12\x17\n\x0ftop_k_overrides\x18\x05 \x03(\r\x12\'\n\x06\x66ilter\x18\x07 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x31\n\x10\x66ilter_overrides\x18\x08 \x03(\x0b\x32\x17.google.protobuf.Struct\x12\x16\n\x0einclude_values\x18\x03 \x01(\x08\x12\x18\n\x10include_metadata\x18\t \x01(\x08\x12+\n\x07queries\x18\x04 \x01(\x0b\x32\x1a.pinecone_columnar.NdArray"B\n\rQueryResponse\x12\x31\n\x07matches\x18\x01 \x03(\x0b\x32 .pinecone_columnar.ScoredResults"\x1b\n\x19\x44\x65scribeIndexStatsRequest"(\n\x10NamespaceSummary\x12\x14\n\x0cvector_count\x18\x01 \x01(\r"\xda\x01\n\x1a\x44\x65scribeIndexStatsResponse\x12Q\n\nnamespaces\x18\x01 \x03(\x0b\x32=.pinecone_columnar.DescribeIndexStatsResponse.NamespacesEntry\x12\x11\n\tdimension\x18\x02 \x01(\r\x1aV\n\x0fNamespacesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x32\n\x05value\x18\x02 \x01(\x0b\x32#.pinecone_columnar.NamespaceSummary:\x02\x38\x01\x32\xc8\x03\n\x13VectorColumnService\x12O\n\x06Upsert\x12 .pinecone_columnar.UpsertRequest\x1a!.pinecone_columnar.UpsertResponse"\x00\x12O\n\x06\x44\x65lete\x12 .pinecone_columnar.DeleteRequest\x1a!.pinecone_columnar.DeleteResponse"\x00\x12L\n\x05\x46\x65tch\x12\x1f.pinecone_columnar.FetchRequest\x1a 
.pinecone_columnar.FetchResponse"\x00\x12L\n\x05Query\x12\x1f.pinecone_columnar.QueryRequest\x1a .pinecone_columnar.QueryResponse"\x00\x12s\n\x12\x44\x65scribeIndexStats\x12,.pinecone_columnar.DescribeIndexStatsRequest\x1a-.pinecone_columnar.DescribeIndexStatsResponse"\x00\x42@\n\x11io.pinecone.protoZ+github.com/pinecone-io/go-pinecone/pineconeb\x06proto3', - dependencies=[ - google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, - ], -) - - -_NDARRAY = _descriptor.Descriptor( - name="NdArray", - full_name="pinecone_columnar.NdArray", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="buffer", - full_name="pinecone_columnar.NdArray.buffer", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="shape", - full_name="pinecone_columnar.NdArray.shape", - index=1, - number=2, - type=13, - cpp_type=3, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dtype", - full_name="pinecone_columnar.NdArray.dtype", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="compressed", - full_name="pinecone_columnar.NdArray.compressed", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=80, - serialized_end=155, -) - - -_SCOREDRESULTS = _descriptor.Descriptor( - name="ScoredResults", - full_name="pinecone_columnar.ScoredResults", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="ids", - full_name="pinecone_columnar.ScoredResults.ids", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="scores", - full_name="pinecone_columnar.ScoredResults.scores", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data", - full_name="pinecone_columnar.ScoredResults.data", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="pinecone_columnar.ScoredResults.metadata", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=158, - serialized_end=346, -) - - -_UPSERTREQUEST = _descriptor.Descriptor( - name="UpsertRequest", - full_name="pinecone_columnar.UpsertRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="namespace", - full_name="pinecone_columnar.UpsertRequest.namespace", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ids", - full_name="pinecone_columnar.UpsertRequest.ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data", - full_name="pinecone_columnar.UpsertRequest.data", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="pinecone_columnar.UpsertRequest.metadata", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=349, - serialized_end=481, -) - - -_UPSERTRESPONSE = _descriptor.Descriptor( - name="UpsertResponse", - full_name="pinecone_columnar.UpsertResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="upserted_count", - 
full_name="pinecone_columnar.UpsertResponse.upserted_count", - index=0, - number=1, - type=13, - cpp_type=3, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=483, - serialized_end=523, -) - - -_DELETEREQUEST = _descriptor.Descriptor( - name="DeleteRequest", - full_name="pinecone_columnar.DeleteRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="namespace", - full_name="pinecone_columnar.DeleteRequest.namespace", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ids", - full_name="pinecone_columnar.DeleteRequest.ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="delete_all", - full_name="pinecone_columnar.DeleteRequest.delete_all", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=525, - serialized_end=592, -) - - -_DELETERESPONSE = _descriptor.Descriptor( - name="DeleteResponse", - full_name="pinecone_columnar.DeleteResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=594, - serialized_end=610, -) - - -_FETCHREQUEST = _descriptor.Descriptor( - name="FetchRequest", - full_name="pinecone_columnar.FetchRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="namespace", - full_name="pinecone_columnar.FetchRequest.namespace", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ids", - full_name="pinecone_columnar.FetchRequest.ids", - index=1, - number=2, - type=9, - 
cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=612, - serialized_end=658, -) - - -_FETCHRESPONSE = _descriptor.Descriptor( - name="FetchResponse", - full_name="pinecone_columnar.FetchResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="namespace", - full_name="pinecone_columnar.FetchResponse.namespace", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ids", - full_name="pinecone_columnar.FetchResponse.ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="vectors", - full_name="pinecone_columnar.FetchResponse.vectors", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="pinecone_columnar.FetchResponse.metadata", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=661, - serialized_end=796, -) - - -_QUERYREQUEST = _descriptor.Descriptor( - name="QueryRequest", - full_name="pinecone_columnar.QueryRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="namespace", - full_name="pinecone_columnar.QueryRequest.namespace", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="namespace_overrides", - full_name="pinecone_columnar.QueryRequest.namespace_overrides", - index=1, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="top_k", - full_name="pinecone_columnar.QueryRequest.top_k", - index=2, - number=2, - type=13, - cpp_type=3, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="top_k_overrides", - full_name="pinecone_columnar.QueryRequest.top_k_overrides", - index=3, - number=5, - type=13, - cpp_type=3, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="pinecone_columnar.QueryRequest.filter", - index=4, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter_overrides", - full_name="pinecone_columnar.QueryRequest.filter_overrides", - index=5, - number=8, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="include_values", - full_name="pinecone_columnar.QueryRequest.include_values", - index=6, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="include_metadata", - full_name="pinecone_columnar.QueryRequest.include_metadata", - index=7, - number=9, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="queries", - full_name="pinecone_columnar.QueryRequest.queries", - index=8, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=799, - serialized_end=1088, -) - - -_QUERYRESPONSE = _descriptor.Descriptor( - name="QueryResponse", - full_name="pinecone_columnar.QueryResponse", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="matches", - full_name="pinecone_columnar.QueryResponse.matches", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1090, - serialized_end=1156, -) - - -_DESCRIBEINDEXSTATSREQUEST = _descriptor.Descriptor( - name="DescribeIndexStatsRequest", - full_name="pinecone_columnar.DescribeIndexStatsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1158, - serialized_end=1185, -) - - -_NAMESPACESUMMARY = _descriptor.Descriptor( - name="NamespaceSummary", - full_name="pinecone_columnar.NamespaceSummary", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="vector_count", - full_name="pinecone_columnar.NamespaceSummary.vector_count", - index=0, - number=1, - type=13, - cpp_type=3, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1187, - serialized_end=1227, -) - - -_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY = _descriptor.Descriptor( - name="NamespacesEntry", - full_name="pinecone_columnar.DescribeIndexStatsResponse.NamespacesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="pinecone_columnar.DescribeIndexStatsResponse.NamespacesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="pinecone_columnar.DescribeIndexStatsResponse.NamespacesEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1362, - serialized_end=1448, -) - -_DESCRIBEINDEXSTATSRESPONSE = _descriptor.Descriptor( - 
name="DescribeIndexStatsResponse", - full_name="pinecone_columnar.DescribeIndexStatsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="namespaces", - full_name="pinecone_columnar.DescribeIndexStatsResponse.namespaces", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dimension", - full_name="pinecone_columnar.DescribeIndexStatsResponse.dimension", - index=1, - number=2, - type=13, - cpp_type=3, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[ - _DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1230, - serialized_end=1448, -) - -_SCOREDRESULTS.fields_by_name["ids"].message_type = _NDARRAY -_SCOREDRESULTS.fields_by_name["scores"].message_type = _NDARRAY -_SCOREDRESULTS.fields_by_name["data"].message_type = _NDARRAY -_SCOREDRESULTS.fields_by_name["metadata"].message_type = _NDARRAY -_UPSERTREQUEST.fields_by_name["data"].message_type = _NDARRAY -_UPSERTREQUEST.fields_by_name["metadata"].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_FETCHRESPONSE.fields_by_name["vectors"].message_type = _NDARRAY -_FETCHRESPONSE.fields_by_name["metadata"].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_QUERYREQUEST.fields_by_name["filter"].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_QUERYREQUEST.fields_by_name["filter_overrides"].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_QUERYREQUEST.fields_by_name["queries"].message_type = _NDARRAY -_QUERYRESPONSE.fields_by_name["matches"].message_type = _SCOREDRESULTS -_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY.fields_by_name["value"].message_type = _NAMESPACESUMMARY -_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY.containing_type = _DESCRIBEINDEXSTATSRESPONSE -_DESCRIBEINDEXSTATSRESPONSE.fields_by_name["namespaces"].message_type = _DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY -DESCRIPTOR.message_types_by_name["NdArray"] = _NDARRAY -DESCRIPTOR.message_types_by_name["ScoredResults"] = _SCOREDRESULTS -DESCRIPTOR.message_types_by_name["UpsertRequest"] = _UPSERTREQUEST -DESCRIPTOR.message_types_by_name["UpsertResponse"] = _UPSERTRESPONSE -DESCRIPTOR.message_types_by_name["DeleteRequest"] = _DELETEREQUEST -DESCRIPTOR.message_types_by_name["DeleteResponse"] = _DELETERESPONSE -DESCRIPTOR.message_types_by_name["FetchRequest"] = _FETCHREQUEST -DESCRIPTOR.message_types_by_name["FetchResponse"] = _FETCHRESPONSE -DESCRIPTOR.message_types_by_name["QueryRequest"] = _QUERYREQUEST -DESCRIPTOR.message_types_by_name["QueryResponse"] = _QUERYRESPONSE -DESCRIPTOR.message_types_by_name["DescribeIndexStatsRequest"] = _DESCRIBEINDEXSTATSREQUEST -DESCRIPTOR.message_types_by_name["NamespaceSummary"] = _NAMESPACESUMMARY -DESCRIPTOR.message_types_by_name["DescribeIndexStatsResponse"] = _DESCRIBEINDEXSTATSRESPONSE 
-_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -NdArray = _reflection.GeneratedProtocolMessageType( - "NdArray", - (_message.Message,), - { - "DESCRIPTOR": _NDARRAY, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.NdArray) - }, -) -_sym_db.RegisterMessage(NdArray) - -ScoredResults = _reflection.GeneratedProtocolMessageType( - "ScoredResults", - (_message.Message,), - { - "DESCRIPTOR": _SCOREDRESULTS, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.ScoredResults) - }, -) -_sym_db.RegisterMessage(ScoredResults) - -UpsertRequest = _reflection.GeneratedProtocolMessageType( - "UpsertRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPSERTREQUEST, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.UpsertRequest) - }, -) -_sym_db.RegisterMessage(UpsertRequest) - -UpsertResponse = _reflection.GeneratedProtocolMessageType( - "UpsertResponse", - (_message.Message,), - { - "DESCRIPTOR": _UPSERTRESPONSE, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.UpsertResponse) - }, -) -_sym_db.RegisterMessage(UpsertResponse) - -DeleteRequest = _reflection.GeneratedProtocolMessageType( - "DeleteRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEREQUEST, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.DeleteRequest) - }, -) -_sym_db.RegisterMessage(DeleteRequest) - -DeleteResponse = _reflection.GeneratedProtocolMessageType( - "DeleteResponse", - (_message.Message,), - { - "DESCRIPTOR": _DELETERESPONSE, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.DeleteResponse) - }, -) -_sym_db.RegisterMessage(DeleteResponse) - -FetchRequest = _reflection.GeneratedProtocolMessageType( - "FetchRequest", - (_message.Message,), - { - "DESCRIPTOR": _FETCHREQUEST, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.FetchRequest) - }, -) -_sym_db.RegisterMessage(FetchRequest) - -FetchResponse = _reflection.GeneratedProtocolMessageType( - "FetchResponse", - (_message.Message,), - { - "DESCRIPTOR": _FETCHRESPONSE, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.FetchResponse) - }, -) -_sym_db.RegisterMessage(FetchResponse) - -QueryRequest = _reflection.GeneratedProtocolMessageType( - "QueryRequest", - (_message.Message,), - { - "DESCRIPTOR": _QUERYREQUEST, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.QueryRequest) - }, -) -_sym_db.RegisterMessage(QueryRequest) - -QueryResponse = _reflection.GeneratedProtocolMessageType( - "QueryResponse", - (_message.Message,), - { - "DESCRIPTOR": _QUERYRESPONSE, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.QueryResponse) - }, -) -_sym_db.RegisterMessage(QueryResponse) - -DescribeIndexStatsRequest = _reflection.GeneratedProtocolMessageType( - "DescribeIndexStatsRequest", - (_message.Message,), - { - "DESCRIPTOR": _DESCRIBEINDEXSTATSREQUEST, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.DescribeIndexStatsRequest) - }, -) -_sym_db.RegisterMessage(DescribeIndexStatsRequest) - -NamespaceSummary = _reflection.GeneratedProtocolMessageType( - "NamespaceSummary", - (_message.Message,), - { - 
"DESCRIPTOR": _NAMESPACESUMMARY, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.NamespaceSummary) - }, -) -_sym_db.RegisterMessage(NamespaceSummary) - -DescribeIndexStatsResponse = _reflection.GeneratedProtocolMessageType( - "DescribeIndexStatsResponse", - (_message.Message,), - { - "NamespacesEntry": _reflection.GeneratedProtocolMessageType( - "NamespacesEntry", - (_message.Message,), - { - "DESCRIPTOR": _DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.DescribeIndexStatsResponse.NamespacesEntry) - }, - ), - "DESCRIPTOR": _DESCRIBEINDEXSTATSRESPONSE, - "__module__": "vector_column_service_pb2" - # @@protoc_insertion_point(class_scope:pinecone_columnar.DescribeIndexStatsResponse) - }, -) -_sym_db.RegisterMessage(DescribeIndexStatsResponse) -_sym_db.RegisterMessage(DescribeIndexStatsResponse.NamespacesEntry) - - -DESCRIPTOR._options = None -_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY._options = None - -_VECTORCOLUMNSERVICE = _descriptor.ServiceDescriptor( - name="VectorColumnService", - full_name="pinecone_columnar.VectorColumnService", - file=DESCRIPTOR, - index=0, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=1451, - serialized_end=1907, - methods=[ - _descriptor.MethodDescriptor( - name="Upsert", - full_name="pinecone_columnar.VectorColumnService.Upsert", - index=0, - containing_service=None, - input_type=_UPSERTREQUEST, - output_type=_UPSERTRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Delete", - full_name="pinecone_columnar.VectorColumnService.Delete", - index=1, - containing_service=None, - input_type=_DELETEREQUEST, - output_type=_DELETERESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Fetch", - full_name="pinecone_columnar.VectorColumnService.Fetch", - index=2, - containing_service=None, - input_type=_FETCHREQUEST, - output_type=_FETCHRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Query", - full_name="pinecone_columnar.VectorColumnService.Query", - index=3, - containing_service=None, - input_type=_QUERYREQUEST, - output_type=_QUERYRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DescribeIndexStats", - full_name="pinecone_columnar.VectorColumnService.DescribeIndexStats", - index=4, - containing_service=None, - input_type=_DESCRIBEINDEXSTATSREQUEST, - output_type=_DESCRIBEINDEXSTATSRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_VECTORCOLUMNSERVICE) - -DESCRIPTOR.services_by_name["VectorColumnService"] = _VECTORCOLUMNSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/pinecone/core/grpc/protos/vector_column_service_pb2.pyi b/pinecone/core/grpc/protos/vector_column_service_pb2.pyi deleted file mode 100644 index 3b85ddb1..00000000 --- a/pinecone/core/grpc/protos/vector_column_service_pb2.pyi +++ /dev/null @@ -1,350 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! 
-import sys -from google.protobuf.descriptor import ( - Descriptor as google___protobuf___descriptor___Descriptor, - FileDescriptor as google___protobuf___descriptor___FileDescriptor, -) - -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from google.protobuf.struct_pb2 import ( - Struct as google___protobuf___struct_pb2___Struct, -) - -from typing import ( - Iterable as typing___Iterable, - Mapping as typing___Mapping, - MutableMapping as typing___MutableMapping, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - -builtin___bool = bool -builtin___bytes = bytes -builtin___float = float -builtin___int = int - -DESCRIPTOR: google___protobuf___descriptor___FileDescriptor = ... - -class NdArray(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - buffer: builtin___bytes = ... - shape: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___int] = ... - dtype: typing___Text = ... - compressed: builtin___bool = ... - - def __init__( - self, - *, - buffer: typing___Optional[builtin___bytes] = None, - shape: typing___Optional[typing___Iterable[builtin___int]] = None, - dtype: typing___Optional[typing___Text] = None, - compressed: typing___Optional[builtin___bool] = None, - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions___Literal[ - "buffer", b"buffer", "compressed", b"compressed", "dtype", b"dtype", "shape", b"shape" - ], - ) -> None: ... - -type___NdArray = NdArray - -class ScoredResults(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - @property - def ids(self) -> type___NdArray: ... - @property - def scores(self) -> type___NdArray: ... - @property - def data(self) -> type___NdArray: ... - @property - def metadata(self) -> type___NdArray: ... - def __init__( - self, - *, - ids: typing___Optional[type___NdArray] = None, - scores: typing___Optional[type___NdArray] = None, - data: typing___Optional[type___NdArray] = None, - metadata: typing___Optional[type___NdArray] = None, - ) -> None: ... - def HasField( - self, - field_name: typing_extensions___Literal[ - "data", b"data", "ids", b"ids", "metadata", b"metadata", "scores", b"scores" - ], - ) -> builtin___bool: ... - def ClearField( - self, - field_name: typing_extensions___Literal[ - "data", b"data", "ids", b"ids", "metadata", b"metadata", "scores", b"scores" - ], - ) -> None: ... - -type___ScoredResults = ScoredResults - -class UpsertRequest(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - namespace: typing___Text = ... - ids: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - - @property - def data(self) -> type___NdArray: ... - @property - def metadata( - self, - ) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ - google___protobuf___struct_pb2___Struct - ]: ... 
- def __init__( - self, - *, - namespace: typing___Optional[typing___Text] = None, - ids: typing___Optional[typing___Iterable[typing___Text]] = None, - data: typing___Optional[type___NdArray] = None, - metadata: typing___Optional[typing___Iterable[google___protobuf___struct_pb2___Struct]] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal["data", b"data"]) -> builtin___bool: ... - def ClearField( - self, - field_name: typing_extensions___Literal[ - "data", b"data", "ids", b"ids", "metadata", b"metadata", "namespace", b"namespace" - ], - ) -> None: ... - -type___UpsertRequest = UpsertRequest - -class UpsertResponse(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - upserted_count: builtin___int = ... - - def __init__( - self, - *, - upserted_count: typing___Optional[builtin___int] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal["upserted_count", b"upserted_count"]) -> None: ... - -type___UpsertResponse = UpsertResponse - -class DeleteRequest(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - namespace: typing___Text = ... - ids: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - delete_all: builtin___bool = ... - - def __init__( - self, - *, - namespace: typing___Optional[typing___Text] = None, - ids: typing___Optional[typing___Iterable[typing___Text]] = None, - delete_all: typing___Optional[builtin___bool] = None, - ) -> None: ... - def ClearField( - self, - field_name: typing_extensions___Literal["delete_all", b"delete_all", "ids", b"ids", "namespace", b"namespace"], - ) -> None: ... - -type___DeleteRequest = DeleteRequest - -class DeleteResponse(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - def __init__( - self, - ) -> None: ... - -type___DeleteResponse = DeleteResponse - -class FetchRequest(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - namespace: typing___Text = ... - ids: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - - def __init__( - self, - *, - namespace: typing___Optional[typing___Text] = None, - ids: typing___Optional[typing___Iterable[typing___Text]] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal["ids", b"ids", "namespace", b"namespace"]) -> None: ... - -type___FetchRequest = FetchRequest - -class FetchResponse(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - namespace: typing___Text = ... - ids: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - - @property - def vectors( - self, - ) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___NdArray]: ... - @property - def metadata( - self, - ) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ - google___protobuf___struct_pb2___Struct - ]: ... - def __init__( - self, - *, - namespace: typing___Optional[typing___Text] = None, - ids: typing___Optional[typing___Iterable[typing___Text]] = None, - vectors: typing___Optional[typing___Iterable[type___NdArray]] = None, - metadata: typing___Optional[typing___Iterable[google___protobuf___struct_pb2___Struct]] = None, - ) -> None: ... 
- def ClearField( - self, - field_name: typing_extensions___Literal[ - "ids", b"ids", "metadata", b"metadata", "namespace", b"namespace", "vectors", b"vectors" - ], - ) -> None: ... - -type___FetchResponse = FetchResponse - -class QueryRequest(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - namespace: typing___Text = ... - namespace_overrides: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] = ... - top_k: builtin___int = ... - top_k_overrides: google___protobuf___internal___containers___RepeatedScalarFieldContainer[builtin___int] = ... - include_values: builtin___bool = ... - include_metadata: builtin___bool = ... - - @property - def filter(self) -> google___protobuf___struct_pb2___Struct: ... - @property - def filter_overrides( - self, - ) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[ - google___protobuf___struct_pb2___Struct - ]: ... - @property - def queries(self) -> type___NdArray: ... - def __init__( - self, - *, - namespace: typing___Optional[typing___Text] = None, - namespace_overrides: typing___Optional[typing___Iterable[typing___Text]] = None, - top_k: typing___Optional[builtin___int] = None, - top_k_overrides: typing___Optional[typing___Iterable[builtin___int]] = None, - filter: typing___Optional[google___protobuf___struct_pb2___Struct] = None, - filter_overrides: typing___Optional[typing___Iterable[google___protobuf___struct_pb2___Struct]] = None, - include_values: typing___Optional[builtin___bool] = None, - include_metadata: typing___Optional[builtin___bool] = None, - queries: typing___Optional[type___NdArray] = None, - ) -> None: ... - def HasField( - self, field_name: typing_extensions___Literal["filter", b"filter", "queries", b"queries"] - ) -> builtin___bool: ... - def ClearField( - self, - field_name: typing_extensions___Literal[ - "filter", - b"filter", - "filter_overrides", - b"filter_overrides", - "include_metadata", - b"include_metadata", - "include_values", - b"include_values", - "namespace", - b"namespace", - "namespace_overrides", - b"namespace_overrides", - "queries", - b"queries", - "top_k", - b"top_k", - "top_k_overrides", - b"top_k_overrides", - ], - ) -> None: ... - -type___QueryRequest = QueryRequest - -class QueryResponse(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - @property - def matches( - self, - ) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[type___ScoredResults]: ... - def __init__( - self, - *, - matches: typing___Optional[typing___Iterable[type___ScoredResults]] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal["matches", b"matches"]) -> None: ... - -type___QueryResponse = QueryResponse - -class DescribeIndexStatsRequest(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - def __init__( - self, - ) -> None: ... - -type___DescribeIndexStatsRequest = DescribeIndexStatsRequest - -class NamespaceSummary(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - vector_count: builtin___int = ... - - def __init__( - self, - *, - vector_count: typing___Optional[builtin___int] = None, - ) -> None: ... - def ClearField(self, field_name: typing_extensions___Literal["vector_count", b"vector_count"]) -> None: ... 
- -type___NamespaceSummary = NamespaceSummary - -class DescribeIndexStatsResponse(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - - class NamespacesEntry(google___protobuf___message___Message): - DESCRIPTOR: google___protobuf___descriptor___Descriptor = ... - key: typing___Text = ... - - @property - def value(self) -> type___NamespaceSummary: ... - def __init__( - self, - *, - key: typing___Optional[typing___Text] = None, - value: typing___Optional[type___NamespaceSummary] = None, - ) -> None: ... - def HasField(self, field_name: typing_extensions___Literal["value", b"value"]) -> builtin___bool: ... - def ClearField(self, field_name: typing_extensions___Literal["key", b"key", "value", b"value"]) -> None: ... - type___NamespacesEntry = NamespacesEntry - - dimension: builtin___int = ... - - @property - def namespaces(self) -> typing___MutableMapping[typing___Text, type___NamespaceSummary]: ... - def __init__( - self, - *, - namespaces: typing___Optional[typing___Mapping[typing___Text, type___NamespaceSummary]] = None, - dimension: typing___Optional[builtin___int] = None, - ) -> None: ... - def ClearField( - self, field_name: typing_extensions___Literal["dimension", b"dimension", "namespaces", b"namespaces"] - ) -> None: ... - -type___DescribeIndexStatsResponse = DescribeIndexStatsResponse diff --git a/pinecone/core/grpc/protos/vector_column_service_pb2_grpc.py b/pinecone/core/grpc/protos/vector_column_service_pb2_grpc.py deleted file mode 100644 index 677e12b4..00000000 --- a/pinecone/core/grpc/protos/vector_column_service_pb2_grpc.py +++ /dev/null @@ -1,267 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -import pinecone.core.grpc.protos.vector_column_service_pb2 as vector__column__service__pb2 - - -class VectorColumnServiceStub(object): - """The `VectorColumnService` interface is exposed by Pinecone vector index services. - The `Upsert` operation is for uploading the data (the vector ids and values) to be indexed. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.Upsert = channel.unary_unary( - "/pinecone_columnar.VectorColumnService/Upsert", - request_serializer=vector__column__service__pb2.UpsertRequest.SerializeToString, - response_deserializer=vector__column__service__pb2.UpsertResponse.FromString, - ) - self.Delete = channel.unary_unary( - "/pinecone_columnar.VectorColumnService/Delete", - request_serializer=vector__column__service__pb2.DeleteRequest.SerializeToString, - response_deserializer=vector__column__service__pb2.DeleteResponse.FromString, - ) - self.Fetch = channel.unary_unary( - "/pinecone_columnar.VectorColumnService/Fetch", - request_serializer=vector__column__service__pb2.FetchRequest.SerializeToString, - response_deserializer=vector__column__service__pb2.FetchResponse.FromString, - ) - self.Query = channel.unary_unary( - "/pinecone_columnar.VectorColumnService/Query", - request_serializer=vector__column__service__pb2.QueryRequest.SerializeToString, - response_deserializer=vector__column__service__pb2.QueryResponse.FromString, - ) - self.DescribeIndexStats = channel.unary_unary( - "/pinecone_columnar.VectorColumnService/DescribeIndexStats", - request_serializer=vector__column__service__pb2.DescribeIndexStatsRequest.SerializeToString, - response_deserializer=vector__column__service__pb2.DescribeIndexStatsResponse.FromString, - ) - - -class VectorColumnServiceServicer(object): - """The `VectorColumnService` interface is exposed by Pinecone vector index services. - The `Upsert` operation is for uploading the data (the vector ids and values) to be indexed. - """ - - def Upsert(self, request, context): - """If a user upserts a new value for an existing vector id, it overwrites the previous value.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Delete(self, request, context): - """The `Delete` operation deletes multiple vectors ids from a single namespace. - Specifying `delete_all` will delete all vectors from the default namespace. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Fetch(self, request, context): - """The `Fetch` operation returns a vector value by id.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Query(self, request, context): - """The `Query` operation queries the index for the nearest stored vectors to one - or more query vectors, and returns their ids and/or values. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DescribeIndexStats(self, request, context): - """The `DescribeIndexStats` operation returns summary statistics about the index contents.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_VectorColumnServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "Upsert": grpc.unary_unary_rpc_method_handler( - servicer.Upsert, - request_deserializer=vector__column__service__pb2.UpsertRequest.FromString, - response_serializer=vector__column__service__pb2.UpsertResponse.SerializeToString, - ), - "Delete": grpc.unary_unary_rpc_method_handler( - servicer.Delete, - request_deserializer=vector__column__service__pb2.DeleteRequest.FromString, - response_serializer=vector__column__service__pb2.DeleteResponse.SerializeToString, - ), - "Fetch": grpc.unary_unary_rpc_method_handler( - servicer.Fetch, - request_deserializer=vector__column__service__pb2.FetchRequest.FromString, - response_serializer=vector__column__service__pb2.FetchResponse.SerializeToString, - ), - "Query": grpc.unary_unary_rpc_method_handler( - servicer.Query, - request_deserializer=vector__column__service__pb2.QueryRequest.FromString, - response_serializer=vector__column__service__pb2.QueryResponse.SerializeToString, - ), - "DescribeIndexStats": grpc.unary_unary_rpc_method_handler( - servicer.DescribeIndexStats, - request_deserializer=vector__column__service__pb2.DescribeIndexStatsRequest.FromString, - response_serializer=vector__column__service__pb2.DescribeIndexStatsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler("pinecone_columnar.VectorColumnService", rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class VectorColumnService(object): - """The `VectorColumnService` interface is exposed by Pinecone vector index services. - The `Upsert` operation is for uploading the data (the vector ids and values) to be indexed. 
- """ - - @staticmethod - def Upsert( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/pinecone_columnar.VectorColumnService/Upsert", - vector__column__service__pb2.UpsertRequest.SerializeToString, - vector__column__service__pb2.UpsertResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Delete( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/pinecone_columnar.VectorColumnService/Delete", - vector__column__service__pb2.DeleteRequest.SerializeToString, - vector__column__service__pb2.DeleteResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Fetch( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/pinecone_columnar.VectorColumnService/Fetch", - vector__column__service__pb2.FetchRequest.SerializeToString, - vector__column__service__pb2.FetchResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Query( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/pinecone_columnar.VectorColumnService/Query", - vector__column__service__pb2.QueryRequest.SerializeToString, - vector__column__service__pb2.QueryResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DescribeIndexStats( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/pinecone_columnar.VectorColumnService/DescribeIndexStats", - vector__column__service__pb2.DescribeIndexStatsRequest.SerializeToString, - vector__column__service__pb2.DescribeIndexStatsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/pinecone/core/grpc/retry.py b/pinecone/core/grpc/retry.py deleted file mode 100644 index b2718288..00000000 --- a/pinecone/core/grpc/retry.py +++ /dev/null @@ -1,87 +0,0 @@ -import abc -import logging -import random -import time -from typing import Optional, Tuple, NamedTuple - -import grpc - - -_logger = logging.getLogger(__name__) - - -class SleepPolicy(abc.ABC): - @abc.abstractmethod - def sleep(self, try_i: int): - """ - How long to sleep in milliseconds. 
- :param try_i: the number of retry (starting from zero) - """ - assert try_i >= 0 - - -class ExponentialBackoff(SleepPolicy): - def __init__(self, *, init_backoff_ms: int, max_backoff_ms: int, multiplier: int): - self.init_backoff = random.randint(0, init_backoff_ms) - self.max_backoff = max_backoff_ms - self.multiplier = multiplier - - def sleep(self, try_i: int): - sleep_range = min(self.init_backoff * self.multiplier**try_i, self.max_backoff) - sleep_ms = random.randint(0, sleep_range) - _logger.debug(f"gRPC retry. Sleeping for {sleep_ms}ms") - time.sleep(sleep_ms / 1000) - - -class RetryOnRpcErrorClientInterceptor( - grpc.UnaryUnaryClientInterceptor, - grpc.UnaryStreamClientInterceptor, - grpc.StreamUnaryClientInterceptor, - grpc.StreamStreamClientInterceptor, -): - """gRPC retry. - - Referece: https://github.com/grpc/grpc/issues/19514#issuecomment-531700657 - """ - - def __init__(self, retry_config: "RetryConfig"): - self.max_attempts = retry_config.max_attempts - self.sleep_policy = retry_config.sleep_policy - self.retryable_status = retry_config.retryable_status - - def _is_retryable_error(self, response_or_error): - """Determine if a response is a retryable error.""" - return ( - isinstance(response_or_error, grpc.RpcError) - and "_MultiThreadedRendezvous" not in response_or_error.__class__.__name__ - and response_or_error.code() in self.retryable_status - ) - - def _intercept_call(self, continuation, client_call_details, request_or_iterator): - response = None - for try_i in range(self.max_attempts): - response = continuation(client_call_details, request_or_iterator) - if not self._is_retryable_error(response): - break - self.sleep_policy.sleep(try_i) - return response - - def intercept_unary_unary(self, continuation, client_call_details, request): - return self._intercept_call(continuation, client_call_details, request) - - def intercept_unary_stream(self, continuation, client_call_details, request): - return self._intercept_call(continuation, client_call_details, request) - - def intercept_stream_unary(self, continuation, client_call_details, request_iterator): - return self._intercept_call(continuation, client_call_details, request_iterator) - - def intercept_stream_stream(self, continuation, client_call_details, request_iterator): - return self._intercept_call(continuation, client_call_details, request_iterator) - - -class RetryConfig(NamedTuple): - """Config settings related to retry""" - - max_attempts: int = 4 - sleep_policy: SleepPolicy = ExponentialBackoff(init_backoff_ms=100, max_backoff_ms=1600, multiplier=2) - retryable_status: Optional[Tuple[grpc.StatusCode, ...]] = (grpc.StatusCode.UNAVAILABLE,) diff --git a/pinecone/core/utils/__init__.py b/pinecone/core/utils/__init__.py deleted file mode 100644 index d4cca164..00000000 --- a/pinecone/core/utils/__init__.py +++ /dev/null @@ -1,136 +0,0 @@ -import inspect -import logging -import re -import uuid -import warnings -from pathlib import Path -from typing import List - -import requests -import urllib3 - -try: - from pinecone.core.grpc.protos import vector_column_service_pb2 - from google.protobuf.struct_pb2 import Struct - from google.protobuf import json_format - import numpy as np - import lz4.frame -except Exception: - pass # ignore for non-[grpc] installations - -DNS_COMPATIBLE_REGEX = re.compile("^[a-z0-9]([a-z0-9]|[-])+[a-z0-9]$") - - -def dump_numpy_public(np_array: "np.ndarray", compressed: bool = False) -> "vector_column_service_pb2.NdArray": - """ - Dump numpy array to vector_column_service_pb2.NdArray - """ - 
warn_deprecated( - "dump_numpy_public and all numpy-related features will be removed in a future version", - deprecated_in="2.2.1", - removal_in="3.0.0", - ) - protobuf_arr = vector_column_service_pb2.NdArray() - protobuf_arr.dtype = str(np_array.dtype) - protobuf_arr.shape.extend(np_array.shape) - if compressed: - protobuf_arr.buffer = lz4.frame.compress(np_array.tobytes()) - protobuf_arr.compressed = True - else: - protobuf_arr.buffer = np_array.tobytes() - return protobuf_arr - - -def dump_strings_public(strs: List[str], compressed: bool = False) -> "vector_column_service_pb2.NdArray": - warn_deprecated( - "dump_strings_public and all numpy-related features will be removed in a future version", - deprecated_in="2.2.1", - removal_in="3.0.0", - ) - return dump_numpy_public(np.array(strs, dtype="S"), compressed=compressed) - - -def get_version(): - return Path(__file__).parent.parent.parent.joinpath("__version__").read_text().strip() - - -def get_environment(): - return Path(__file__).parent.parent.parent.joinpath("__environment__").read_text().strip() - - -def validate_dns_name(name): - if not DNS_COMPATIBLE_REGEX.match(name): - raise ValueError( - "{} is invalid - service names and node names must consist of lower case " - "alphanumeric characters or '-', start with an alphabetic character, and end with an " - "alphanumeric character (e.g. 'my-name', or 'abc-123')".format(name) - ) - - -def _generate_request_id() -> str: - return str(uuid.uuid4()) - - -def fix_tuple_length(t, n): - """Extend tuple t to length n by adding None items at the end of the tuple. Return the new tuple.""" - return t + ((None,) * (n - len(t))) if len(t) < n else t - - -def get_user_agent(): - client_id = f"python-client-{get_version()}" - user_agent_details = {"requests": requests.__version__, "urllib3": urllib3.__version__} - user_agent = "{} ({})".format(client_id, ", ".join([f"{k}:{v}" for k, v in user_agent_details.items()])) - return user_agent - - -def dict_to_proto_struct(d: dict) -> "Struct": - if not d: - d = {} - s = Struct() - s.update(d) - return s - - -def proto_struct_to_dict(s: "Struct") -> dict: - return json_format.MessageToDict(s) - - -def load_numpy_public(proto_arr: "vector_column_service_pb2.NdArray") -> "np.ndarray": - """ - Load numpy array from protobuf - :param proto_arr: - :return: - """ - warn_deprecated( - "load_numpy_public and all numpy-related features will be removed in a future version", - deprecated_in="2.2.1", - removal_in="3.0.0", - ) - if len(proto_arr.shape) == 0: - return np.array([]) - if proto_arr.compressed: - numpy_arr = np.frombuffer(lz4.frame.decompress(proto_arr.buffer), dtype=proto_arr.dtype) - else: - numpy_arr = np.frombuffer(proto_arr.buffer, dtype=proto_arr.dtype) - return numpy_arr.reshape(proto_arr.shape) - - -def load_strings_public(proto_arr: "vector_column_service_pb2.NdArray") -> List[str]: - warn_deprecated( - "load_strings_public and all numpy-related features will be removed in a future version", - deprecated_in="2.2.1", - removal_in="3.0.0", - ) - return [str(item, "utf-8") for item in load_numpy_public(proto_arr)] - - -def warn_deprecated(description: str = "", deprecated_in: str = None, removal_in: str = None): - message = f"DEPRECATED since v{deprecated_in} [Will be removed in v{removal_in}]: {description}" - warnings.warn(message, FutureWarning) - - -def check_kwargs(caller, given): - argspec = inspect.getfullargspec(caller) - diff = set(given).difference(argspec.args) - if diff: - logging.exception(caller.__name__ + " had unexpected keyword 
argument(s): " + ", ".join(diff), exc_info=False) diff --git a/pinecone/core/utils/constants.py b/pinecone/core/utils/constants.py deleted file mode 100644 index ca9d9631..00000000 --- a/pinecone/core/utils/constants.py +++ /dev/null @@ -1,38 +0,0 @@ -import os -import enum - -from pinecone.core.utils import get_environment, get_version - -PARENT_LOGGER_NAME = "pinecone" -DEFAULT_PARENT_LOGGER_LEVEL = "ERROR" - -MAX_MSG_SIZE = 128 * 1024 * 1024 - -MAX_ID_LENGTH = int(os.getenv("PINECONE_MAX_ID_LENGTH", default="64")) - -REQUEST_ID: str = "request_id" -CLIENT_VERSION_HEADER = "X-Pinecone-Client-Version" - - -class NodeType(str, enum.Enum): - STANDARD = "STANDARD" - COMPUTE = "COMPUTE" - MEMORY = "MEMORY" - STANDARD2X = "STANDARD2X" - COMPUTE2X = "COMPUTE2X" - MEMORY2X = "MEMORY2X" - STANDARD4X = "STANDARD4X" - COMPUTE4X = "COMPUTE4X" - MEMORY4X = "MEMORY4X" - - -PACKAGE_ENVIRONMENT = get_environment() or "development" -CLIENT_VERSION = get_version() -CLIENT_ID = f"python-client-{CLIENT_VERSION}" - -TCP_KEEPINTVL = 60 # Sec -TCP_KEEPIDLE = 300 # Sec -TCP_KEEPCNT = 4 - -REQUIRED_VECTOR_FIELDS = {"id", "values"} -OPTIONAL_VECTOR_FIELDS = {"sparse_values", "metadata"} diff --git a/pinecone/core/utils/error_handling.py b/pinecone/core/utils/error_handling.py deleted file mode 100644 index 6d3f5405..00000000 --- a/pinecone/core/utils/error_handling.py +++ /dev/null @@ -1,28 +0,0 @@ -import inspect -from functools import wraps - -from urllib3.exceptions import MaxRetryError, ProtocolError - -from pinecone import Config, PineconeProtocolError - - -def validate_and_convert_errors(func): - @wraps(func) - def inner_func(*args, **kwargs): - Config.validate() # raises exceptions in case of invalid config - try: - return func(*args, **kwargs) - except MaxRetryError as e: - if isinstance(e.reason, ProtocolError): - raise PineconeProtocolError( - f"Failed to connect to {e.url}; did you specify the correct index name?" - ) from e - else: - raise - except ProtocolError as e: - raise PineconeProtocolError(f"Failed to connect; did you specify the correct index name?") from e - - # Override signature - sig = inspect.signature(func) - inner_func.__signature__ = sig - return inner_func