diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3f7b73f..424739d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,8 +1,6 @@ name: ci on: - pull_request: - branches: - - main + pull_request: {} jobs: build: @@ -36,7 +34,6 @@ jobs: run: | go get ./pinecone - name: Run tests - continue-on-error: true run: go test -count=1 -v ./pinecone env: PINECONE_API_KEY: ${{ secrets.API_KEY }} diff --git a/README.md b/README.md index 1fdf2a9..68d8552 100644 --- a/README.md +++ b/README.md @@ -579,6 +579,70 @@ func main() { } ``` +### Import vectors from object storage + +You can now [import vectors en masse](https://docs.pinecone.io/guides/data/understanding-imports) from object +storage. `Import` is a long-running, asynchronous operation that imports large numbers of records into a Pinecone +serverless index. + +In order to import vectors from object storage, they must be stored in Parquet files and adhere to the necessary +[file format](https://docs.pinecone.io/guides/data/understanding-imports#parquet-file-format). Your object storage +must also adhere to the necessary [directory structure](https://docs.pinecone.io/guides/data/understanding-imports#directory-structure). 
+ +The following example imports vectors from an Amazon S3 bucket into a Pinecone serverless index: + +```go + ctx := context.Background() + + clientParams := pinecone.NewClientParams{ + ApiKey: os.Getenv("PINECONE_API_KEY"), + } + + pc, err := pinecone.NewClient(clientParams) + + if err != nil { + log.Fatalf("Failed to create Client: %v", err) + } + + indexName := "sample-index" + + idx, err := pc.CreateServerlessIndex(ctx, &pinecone.CreateServerlessIndexRequest{ + Name: indexName, + Dimension: 3, + Metric: pinecone.Cosine, + Cloud: pinecone.Aws, + Region: "us-east-1", + }) + + if err != nil { + log.Fatalf("Failed to create serverless index: %v", err) + } + + idx, err = pc.DescribeIndex(ctx, indexName) + + if err != nil { + log.Fatalf("Failed to describe index \"%v\": %v", idx.Name, err) + } + + idxConnection, err := pc.Index(pinecone.NewIndexConnParams{Host: idx.Host}) + if err != nil { + log.Fatalf("Failed to create IndexConnection for Host: %v: %v", idx.Host, err) + } + + storageURI := "s3://my-bucket/my-directory/" + + errorMode := "abort" // Will abort if error encountered; other option: "continue" + + importRes, err := idxConnection.StartImport(ctx, storageURI, nil, (*pinecone.ImportErrorMode)(&errorMode)) + + if err != nil { + log.Fatalf("Failed to start import: %v", err) + } + + fmt.Printf("import started with ID: %s", importRes.Id) +``` +You can [start, cancel, and check the status](https://docs.pinecone.io/guides/data/import-data) of all or one import operation(s). + ### Query an index #### Query by vector values @@ -1307,13 +1371,17 @@ func main() { ## Inference -The `Client` object has an `Inference` namespace which allows interacting with Pinecone's [Inference API](https://docs.pinecone.io/reference/api/2024-07/inference/generate-embeddings). The Inference API is a service that gives you access to embedding models hosted on Pinecone's infrastructure. 
Read more at [Understanding Pinecone Inference](https://docs.pinecone.io/guides/inference/understanding-inference). +The `Client` object has an `Inference` namespace which allows interacting with +Pinecone's [Inference API](https://docs.pinecone.io/reference/api/2024-07/inference/generate-embeddings). The Inference +API is a service that gives you access to embedding models hosted on Pinecone's infrastructure. Read more +at [Understanding Pinecone Inference](https://docs.pinecone.io/guides/inference/understanding-inference). **Notes:** Models currently supported: -- [multilingual-e5-large](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) +- Embedding: [multilingual-e5-large](https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models) +- Reranking: [bge-reranker-v2-m3](https://docs.pinecone.io/models/bge-reranker-v2-m3) ### Create Embeddings @@ -1368,11 +1436,67 @@ Send text to Pinecone's inference API to generate embeddings for documents and q } fmt.Printf("query embedding response: %+v", queryEmbeddingsResponse) - // << Send query to Pinecone to retrieve similar documents >> + // << Send query to Pinecone to retrieve similar documents >> +``` + +### Rerank documents + +Rerank documents in descending relevance order against a query. + +**Note:** The `score` represents the absolute measure of relevance of a given query and passage pair. Normalized +between [0, 1], the `score` represents how closely relevant a specific item and query are, with scores closer to 1 +indicating higher relevance. + +```go + ctx := context.Background() + + pc, err := pinecone.NewClient(pinecone.NewClientParams{ + ApiKey: "YOUR-API-KEY", + }) + if err != nil { + log.Fatalf("Failed to create Client: %v", err) + } + + rerankModel := "bge-reranker-v2-m3" + query := "What are some good Turkey dishes for Thanksgiving?" 
+ + documents := []pinecone.Document{ + {"title": "Turkey Sandwiches", "body": "Turkey is a classic meat to eat at American Thanksgiving."}, + {"title": "Lemon Turkey", "body": "A lemon brined Turkey with apple sausage stuffing is a classic Thanksgiving main course."}, + {"title": "Thanksgiving", "body": "My favorite Thanksgiving dish is pumpkin pie"}, + {"title": "Protein Sources", "body": "Turkey is a great source of protein."}, + } + + // Optional arguments + topN := 3 + returnDocuments := false + rankFields := []string{"body"} + modelParams := map[string]string{ + "truncate": "END", + } + + rerankRequest := pinecone.RerankRequest{ + Model: rerankModel, + Query: query, + Documents: documents, + TopN: &topN, + ReturnDocuments: &returnDocuments, + RankFields: &rankFields, + Parameters: &modelParams, + } + + rerankResponse, err := pc.Inference.Rerank(ctx, &rerankRequest) + + if err != nil { + log.Fatalf("Failed to rerank documents: %v", err) + } + + fmt.Printf("rerank response: %+v", rerankResponse) ``` ## Support -To get help using go-pinecone you can file an issue on [GitHub](https://github.com/pinecone-io/go-pinecone/issues), visit the [community forum](https://community.pinecone.io/), +To get help using go-pinecone you can file an issue on [GitHub](https://github.com/pinecone-io/go-pinecone/issues), +visit the [community forum](https://community.pinecone.io/), or reach out to support@pinecone.io. diff --git a/codegen/apis b/codegen/apis index 062b114..39e90e2 160000 --- a/codegen/apis +++ b/codegen/apis @@ -1 +1 @@ -Subproject commit 062b114b6d7b016de2b4d2b68c211a81b8689d1a +Subproject commit 39e90e26073686ed3a46d3db1e8b91e845bde90c diff --git a/codegen/build-clients.sh b/codegen/build-clients.sh index 0a4bfd2..be42e60 100755 --- a/codegen/build-clients.sh +++ b/codegen/build-clients.sh @@ -1,15 +1,30 @@ #!/bin/bash -set -eux -o pipefail - version=$1 # e.g. 
2024-07 -# data_destination must align with the option go_package: -# https://github.com/pinecone-io/apis/blob/e9b47c76f649656002f4911946ca6c4c4a6f04fc/src/release/data/data.proto#L3 -data_destination="internal/gen/data" -control_destination="internal/gen/control" +# modules +db_control_module="db_control" +db_data_module="db_data" +inference_module="inference" + +# generated grpc output destination paths +# db_data_destination must align with the option go_package in the proto file: +# https://github.com/pinecone-io/apis/blob/d1d005e75cc9fe9a5c486ef9218fe87b57765961/src/release/db/data/data.proto#L3 +db_data_destination="internal/gen/${db_data_module}" +db_control_destination="internal/gen/${db_control_module}" +inference_destination="internal/gen/${inference_module}" + +# version file version_file="internal/gen/api_version.go" +# generated oas file destination paths +db_data_rest_destination="${db_data_destination}/rest" +db_data_oas_file="${db_data_rest_destination}/${db_data_module}_${version}.oas.go" +db_control_oas_file="${db_control_destination}/${db_control_module}_${version}.oas.go" +inference_oas_file="${inference_destination}/${inference_module}_${version}.oas.go" + +set -eux -o pipefail + update_apis_repo() { echo "Updating apis repo" pushd codegen/apis @@ -27,18 +42,35 @@ verify_spec_version() { echo "Version is required" exit 1 fi + + verify_directory_exists "codegen/apis/_build/${version}" +} + +verify_directory_exists() { + local directory=$1 + if [ ! 
-d "$directory" ]; then + echo "Directory does not exist at $directory" + exit 1 + fi } generate_oas_client() { - oas_file="codegen/apis/_build/${version}/control_${version}.oas.yaml" + local module=$1 + local destination=$2 + + # source oas file for module and version + oas_file="codegen/apis/_build/${version}/${module}_${version}.oas.yaml" - oapi-codegen --package=control \ + oapi-codegen --package=${module} \ --generate types,client \ - "${oas_file}" > "${control_destination}/control_plane.oas.go" + "${oas_file}" > "${destination}" } generate_proto_client() { - proto_file="codegen/apis/_build/${version}/data_${version}.proto" + local module=$1 + + # source proto file for module and version + proto_file="codegen/apis/_build/${version}/${module}_${version}.proto" protoc --experimental_allow_proto3_optional \ --proto_path=codegen/apis/vendor/protos \ @@ -63,19 +95,27 @@ EOL update_apis_repo verify_spec_version $version -# Generate control plane client code -rm -rf "${control_destination}" -mkdir -p "${control_destination}" +# Clear internal/gen/* contents +rm -rf internal/gen/* + +# Generate db_control oas client +rm -rf "${db_control_destination}" +mkdir -p "${db_control_destination}" +generate_oas_client $db_control_module $db_control_oas_file -generate_oas_client +# Generate inference oas client +rm -rf "${inference_destination}" +mkdir -p "${inference_destination}" +generate_oas_client $inference_module $inference_oas_file -# Generate data plane client code -rm -rf "${data_destination}" -mkdir -p "${data_destination}" +# Generate db_data oas and proto clients +rm -rf "${db_data_destination}" +mkdir -p "${db_data_destination}" +mkdir -p "${db_data_rest_destination}" -generate_proto_client +generate_oas_client $db_data_module $db_data_oas_file +generate_proto_client $db_data_module # Generate version file rm -rf "${version_file}" - generate_version_file \ No newline at end of file diff --git a/internal/gen/api_version.go b/internal/gen/api_version.go index 
e971954..45dce50 100644 --- a/internal/gen/api_version.go +++ b/internal/gen/api_version.go @@ -1,4 +1,4 @@ // Code generated by build-clients.sh - DO NOT EDIT. package gen -const PineconeApiVersion = "2024-07" +const PineconeApiVersion = "2024-10" diff --git a/internal/gen/control/control_plane.oas.go b/internal/gen/db_control/db_control_2024-10.oas.go similarity index 91% rename from internal/gen/control/control_plane.oas.go rename to internal/gen/db_control/db_control_2024-10.oas.go index c8bdb49..6c3ea35 100644 --- a/internal/gen/control/control_plane.oas.go +++ b/internal/gen/db_control/db_control_2024-10.oas.go @@ -1,7 +1,7 @@ -// Package control provides primitives to interact with the openapi HTTP API. +// Package db_control provides primitives to interact with the openapi HTTP API. // // Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.3.0 DO NOT EDIT. -package control +package db_control import ( "bytes" @@ -53,6 +53,7 @@ const ( NOTFOUND ErrorResponseErrorCode = "NOT_FOUND" OK ErrorResponseErrorCode = "OK" OUTOFRANGE ErrorResponseErrorCode = "OUT_OF_RANGE" + PAYMENTREQUIRED ErrorResponseErrorCode = "PAYMENT_REQUIRED" PERMISSIONDENIED ErrorResponseErrorCode = "PERMISSION_DENIED" QUOTAEXCEEDED ErrorResponseErrorCode = "QUOTA_EXCEEDED" RESOURCEEXHAUSTED ErrorResponseErrorCode = "RESOURCE_EXHAUSTED" @@ -131,6 +132,9 @@ type ConfigureIndexRequest struct { Replicas *int32 `json:"replicas,omitempty"` } `json:"pod"` } `json:"spec,omitempty"` + + // Tags Custom user tags added to an index. Keys must be alphanumeric and 80 characters or less. Values must be 120 characters or less. + Tags *IndexTags `json:"tags"` } // CreateCollectionRequest The configuration needed to create a Pinecone collection. @@ -160,6 +164,9 @@ type CreateIndexRequest struct { // // For serverless indexes, you define only the [cloud and region](http://docs.pinecone.io/guides/indexes/understanding-indexes#cloud-regions) where the index should be hosted. 
For pod-based indexes, you define the [environment](http://docs.pinecone.io/guides/indexes/understanding-indexes#pod-environments) where the index should be hosted, the [pod type and size](http://docs.pinecone.io/guides/indexes/understanding-indexes#pod-types) to use, and other index characteristics. Spec IndexSpec `json:"spec"` + + // Tags Custom user tags added to an index. Keys must be alphanumeric and 80 characters or less. Values must be 120 characters or less. + Tags *IndexTags `json:"tags"` } // CreateIndexRequestMetric The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. @@ -168,40 +175,6 @@ type CreateIndexRequestMetric string // DeletionProtection Whether [deletion protection](http://docs.pinecone.io/guides/indexes/prevent-index-deletion) is enabled/disabled for the index. type DeletionProtection string -// EmbedRequest Generate embeddings for inputs -type EmbedRequest struct { - Inputs []struct { - Text *string `json:"text,omitempty"` - } `json:"inputs"` - Model string `json:"model"` - - // Parameters Model-specific parameters. - Parameters *struct { - // InputType Common property used to distinguish between types of data. - InputType *string `json:"input_type,omitempty"` - - // Truncate How to handle inputs longer than those supported by the model. If NONE, when the input exceeds the maximum input token length an error will be returned. - Truncate *string `json:"truncate,omitempty"` - } `json:"parameters,omitempty"` -} - -// Embedding Embedding of a single input -type Embedding struct { - // Values The embedding values. 
- Values *[]float32 `json:"values,omitempty"` -} - -// EmbeddingsList Embeddings generated for the input -type EmbeddingsList struct { - Data *[]Embedding `json:"data,omitempty"` - Model *string `json:"model,omitempty"` - - // Usage Usage statistics for model inference including any instruction prefixes - Usage *struct { - TotalTokens *int `json:"total_tokens,omitempty"` - } `json:"usage,omitempty"` -} - // ErrorResponse The response shape used for all error responses. type ErrorResponse struct { // Error Detailed information about the error that occurred. @@ -252,6 +225,9 @@ type IndexModel struct { Ready bool `json:"ready"` State IndexModelStatusState `json:"state"` } `json:"status"` + + // Tags Custom user tags added to an index. Keys must be alphanumeric and 80 characters or less. Values must be 120 characters or less. + Tags *IndexTags `json:"tags"` } // IndexModelMetric The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. @@ -278,6 +254,9 @@ type IndexSpec0 = interface{} // IndexSpec1 defines model for . type IndexSpec1 = interface{} +// IndexTags Custom user tags added to an index. Keys must be alphanumeric and 80 characters or less. Values must be 120 characters or less. +type IndexTags map[string]string + // PodSpec Configuration needed to deploy a pod-based index. type PodSpec struct { // Environment The environment where the index is hosted. @@ -293,13 +272,13 @@ type PodSpec struct { PodType string `json:"pod_type"` // Pods The number of pods to be used in the index. This should be equal to `shards` x `replicas`.' - Pods int `json:"pods"` + Pods *int `json:"pods,omitempty"` // Replicas The number of replicas. Replicas duplicate your index. They provide higher availability and throughput. Replicas can be scaled up or down as your needs change. - Replicas int32 `json:"replicas"` + Replicas *int32 `json:"replicas,omitempty"` // Shards The number of shards. 
Shards split your data across multiple pods so you can fit more data into an index. - Shards int32 `json:"shards"` + Shards *int32 `json:"shards,omitempty"` // SourceCollection The name of the collection to be used as the source for the index. SourceCollection *string `json:"source_collection,omitempty"` @@ -320,9 +299,6 @@ type ServerlessSpecCloud string // CreateCollectionJSONRequestBody defines body for CreateCollection for application/json ContentType. type CreateCollectionJSONRequestBody = CreateCollectionRequest -// EmbedJSONRequestBody defines body for Embed for application/json ContentType. -type EmbedJSONRequestBody = EmbedRequest - // CreateIndexJSONRequestBody defines body for CreateIndex for application/json ContentType. type CreateIndexJSONRequestBody = CreateIndexRequest @@ -526,11 +502,6 @@ type ClientInterface interface { // DescribeCollection request DescribeCollection(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*http.Response, error) - // EmbedWithBody request with any body - EmbedWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - - Embed(ctx context.Context, body EmbedJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - // ListIndexes request ListIndexes(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -611,30 +582,6 @@ func (c *Client) DescribeCollection(ctx context.Context, collectionName string, return c.Client.Do(req) } -func (c *Client) EmbedWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewEmbedRequestWithBody(c.Server, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) Embed(ctx context.Context, body EmbedJSONRequestBody, 
reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewEmbedRequest(c.Server, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - func (c *Client) ListIndexes(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewListIndexesRequest(c.Server) if err != nil { @@ -854,46 +801,6 @@ func NewDescribeCollectionRequest(server string, collectionName string) (*http.R return req, nil } -// NewEmbedRequest calls the generic Embed builder with application/json body -func NewEmbedRequest(server string, body EmbedJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewEmbedRequestWithBody(server, "application/json", bodyReader) -} - -// NewEmbedRequestWithBody generates requests for Embed with any type of body -func NewEmbedRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { - var err error - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/embed") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - return req, nil -} - // NewListIndexesRequest generates requests for ListIndexes func NewListIndexesRequest(server string) (*http.Request, error) { var err error @@ -1133,11 +1040,6 @@ type ClientWithResponsesInterface interface { // DescribeCollectionWithResponse request DescribeCollectionWithResponse(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*DescribeCollectionResponse, error) - // EmbedWithBodyWithResponse request with any body - EmbedWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EmbedResponse, error) - - EmbedWithResponse(ctx context.Context, body EmbedJSONRequestBody, reqEditors ...RequestEditorFn) (*EmbedResponse, error) - // ListIndexesWithResponse request ListIndexesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListIndexesResponse, error) @@ -1188,6 +1090,7 @@ type CreateCollectionResponse struct { JSON201 *CollectionModel JSON400 *ErrorResponse JSON401 *ErrorResponse + JSON402 *ErrorResponse JSON403 *ErrorResponse JSON409 *ErrorResponse JSON422 *ErrorResponse @@ -1259,31 +1162,6 @@ func (r DescribeCollectionResponse) StatusCode() int { return 0 } -type EmbedResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *EmbeddingsList - JSON400 *ErrorResponse - JSON401 *ErrorResponse - JSON500 *ErrorResponse -} - -// Status returns HTTPResponse.Status -func (r EmbedResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r EmbedResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - type 
ListIndexesResponse struct { Body []byte HTTPResponse *http.Response @@ -1314,6 +1192,7 @@ type CreateIndexResponse struct { JSON201 *IndexModel JSON400 *ErrorResponse JSON401 *ErrorResponse + JSON402 *ErrorResponse JSON403 *ErrorResponse JSON404 *ErrorResponse JSON409 *ErrorResponse @@ -1341,6 +1220,7 @@ type DeleteIndexResponse struct { Body []byte HTTPResponse *http.Response JSON401 *ErrorResponse + JSON403 *ErrorResponse JSON404 *ErrorResponse JSON412 *ErrorResponse JSON500 *ErrorResponse @@ -1393,6 +1273,7 @@ type ConfigureIndexResponse struct { JSON202 *IndexModel JSON400 *ErrorResponse JSON401 *ErrorResponse + JSON402 *ErrorResponse JSON403 *ErrorResponse JSON404 *ErrorResponse JSON422 *ErrorResponse @@ -1459,23 +1340,6 @@ func (c *ClientWithResponses) DescribeCollectionWithResponse(ctx context.Context return ParseDescribeCollectionResponse(rsp) } -// EmbedWithBodyWithResponse request with arbitrary body returning *EmbedResponse -func (c *ClientWithResponses) EmbedWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EmbedResponse, error) { - rsp, err := c.EmbedWithBody(ctx, contentType, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseEmbedResponse(rsp) -} - -func (c *ClientWithResponses) EmbedWithResponse(ctx context.Context, body EmbedJSONRequestBody, reqEditors ...RequestEditorFn) (*EmbedResponse, error) { - rsp, err := c.Embed(ctx, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseEmbedResponse(rsp) -} - // ListIndexesWithResponse request returning *ListIndexesResponse func (c *ClientWithResponses) ListIndexesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListIndexesResponse, error) { rsp, err := c.ListIndexes(ctx, reqEditors...) 
@@ -1612,6 +1476,13 @@ func ParseCreateCollectionResponse(rsp *http.Response) (*CreateCollectionRespons } response.JSON401 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 402: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON402 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: var dest ErrorResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { @@ -1732,53 +1603,6 @@ func ParseDescribeCollectionResponse(rsp *http.Response) (*DescribeCollectionRes return response, nil } -// ParseEmbedResponse parses an HTTP response from a EmbedWithResponse call -func ParseEmbedResponse(rsp *http.Response) (*EmbedResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &EmbedResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest EmbeddingsList - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON500 = &dest - - } - - return response, nil -} - // ParseListIndexesResponse parses an HTTP response 
from a ListIndexesWithResponse call func ParseListIndexesResponse(rsp *http.Response) (*ListIndexesResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -1854,6 +1678,13 @@ func ParseCreateIndexResponse(rsp *http.Response) (*CreateIndexResponse, error) } response.JSON401 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 402: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON402 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: var dest ErrorResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { @@ -1915,6 +1746,13 @@ func ParseDeleteIndexResponse(rsp *http.Response) (*DeleteIndexResponse, error) } response.JSON401 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: var dest ErrorResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { @@ -2023,6 +1861,13 @@ func ParseConfigureIndexResponse(rsp *http.Response) (*ConfigureIndexResponse, e } response.JSON401 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 402: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON402 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: var dest ErrorResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { diff --git a/internal/gen/data/data_2024-07.pb.go b/internal/gen/db_data/grpc/db_data_2024-10.pb.go similarity index 64% rename from internal/gen/data/data_2024-07.pb.go rename to internal/gen/db_data/grpc/db_data_2024-10.pb.go index f7bdca4..b7bdd83 100644 --- 
a/internal/gen/data/data_2024-07.pb.go +++ b/internal/gen/db_data/grpc/db_data_2024-10.pb.go @@ -2,9 +2,9 @@ // versions: // protoc-gen-go v1.32.0 // protoc v5.27.1 -// source: data_2024-07.proto +// source: db_data_2024-10.proto -package data +package grpc import ( _ "google.golang.org/genproto/googleapis/api/annotations" @@ -34,7 +34,7 @@ type SparseValues struct { func (x *SparseValues) Reset() { *x = SparseValues{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[0] + mi := &file_db_data_2024_10_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -47,7 +47,7 @@ func (x *SparseValues) String() string { func (*SparseValues) ProtoMessage() {} func (x *SparseValues) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[0] + mi := &file_db_data_2024_10_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -60,7 +60,7 @@ func (x *SparseValues) ProtoReflect() protoreflect.Message { // Deprecated: Use SparseValues.ProtoReflect.Descriptor instead. 
func (*SparseValues) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{0} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{0} } func (x *SparseValues) GetIndices() []uint32 { @@ -94,7 +94,7 @@ type Vector struct { func (x *Vector) Reset() { *x = Vector{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[1] + mi := &file_db_data_2024_10_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -107,7 +107,7 @@ func (x *Vector) String() string { func (*Vector) ProtoMessage() {} func (x *Vector) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[1] + mi := &file_db_data_2024_10_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -120,7 +120,7 @@ func (x *Vector) ProtoReflect() protoreflect.Message { // Deprecated: Use Vector.ProtoReflect.Descriptor instead. func (*Vector) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{1} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{1} } func (x *Vector) GetId() string { @@ -171,7 +171,7 @@ type ScoredVector struct { func (x *ScoredVector) Reset() { *x = ScoredVector{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[2] + mi := &file_db_data_2024_10_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -184,7 +184,7 @@ func (x *ScoredVector) String() string { func (*ScoredVector) ProtoMessage() {} func (x *ScoredVector) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[2] + mi := &file_db_data_2024_10_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -197,7 +197,7 @@ func (x *ScoredVector) ProtoReflect() protoreflect.Message { // Deprecated: Use 
ScoredVector.ProtoReflect.Descriptor instead. func (*ScoredVector) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{2} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{2} } func (x *ScoredVector) GetId() string { @@ -253,7 +253,7 @@ type RequestUnion struct { func (x *RequestUnion) Reset() { *x = RequestUnion{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[3] + mi := &file_db_data_2024_10_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -266,7 +266,7 @@ func (x *RequestUnion) String() string { func (*RequestUnion) ProtoMessage() {} func (x *RequestUnion) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[3] + mi := &file_db_data_2024_10_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -279,7 +279,7 @@ func (x *RequestUnion) ProtoReflect() protoreflect.Message { // Deprecated: Use RequestUnion.ProtoReflect.Descriptor instead. 
func (*RequestUnion) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{3} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{3} } func (m *RequestUnion) GetRequestUnionInner() isRequestUnion_RequestUnionInner { @@ -347,7 +347,7 @@ type UpsertRequest struct { func (x *UpsertRequest) Reset() { *x = UpsertRequest{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[4] + mi := &file_db_data_2024_10_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -360,7 +360,7 @@ func (x *UpsertRequest) String() string { func (*UpsertRequest) ProtoMessage() {} func (x *UpsertRequest) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[4] + mi := &file_db_data_2024_10_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -373,7 +373,7 @@ func (x *UpsertRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use UpsertRequest.ProtoReflect.Descriptor instead. 
func (*UpsertRequest) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{4} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{4} } func (x *UpsertRequest) GetVectors() []*Vector { @@ -403,7 +403,7 @@ type UpsertResponse struct { func (x *UpsertResponse) Reset() { *x = UpsertResponse{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[5] + mi := &file_db_data_2024_10_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -416,7 +416,7 @@ func (x *UpsertResponse) String() string { func (*UpsertResponse) ProtoMessage() {} func (x *UpsertResponse) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[5] + mi := &file_db_data_2024_10_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -429,7 +429,7 @@ func (x *UpsertResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use UpsertResponse.ProtoReflect.Descriptor instead. 
func (*UpsertResponse) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{5} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{5} } func (x *UpsertResponse) GetUpsertedCount() uint32 { @@ -461,7 +461,7 @@ type DeleteRequest struct { func (x *DeleteRequest) Reset() { *x = DeleteRequest{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[6] + mi := &file_db_data_2024_10_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -474,7 +474,7 @@ func (x *DeleteRequest) String() string { func (*DeleteRequest) ProtoMessage() {} func (x *DeleteRequest) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[6] + mi := &file_db_data_2024_10_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -487,7 +487,7 @@ func (x *DeleteRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use DeleteRequest.ProtoReflect.Descriptor instead. 
func (*DeleteRequest) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{6} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{6} } func (x *DeleteRequest) GetIds() []string { @@ -528,7 +528,7 @@ type DeleteResponse struct { func (x *DeleteResponse) Reset() { *x = DeleteResponse{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[7] + mi := &file_db_data_2024_10_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -541,7 +541,7 @@ func (x *DeleteResponse) String() string { func (*DeleteResponse) ProtoMessage() {} func (x *DeleteResponse) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[7] + mi := &file_db_data_2024_10_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -554,7 +554,7 @@ func (x *DeleteResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use DeleteResponse.ProtoReflect.Descriptor instead. func (*DeleteResponse) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{7} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{7} } // The request for the `fetch` operation. 
@@ -571,7 +571,7 @@ type FetchRequest struct { func (x *FetchRequest) Reset() { *x = FetchRequest{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[8] + mi := &file_db_data_2024_10_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -584,7 +584,7 @@ func (x *FetchRequest) String() string { func (*FetchRequest) ProtoMessage() {} func (x *FetchRequest) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[8] + mi := &file_db_data_2024_10_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -597,7 +597,7 @@ func (x *FetchRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use FetchRequest.ProtoReflect.Descriptor instead. func (*FetchRequest) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{8} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{8} } func (x *FetchRequest) GetIds() []string { @@ -631,7 +631,7 @@ type FetchResponse struct { func (x *FetchResponse) Reset() { *x = FetchResponse{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[9] + mi := &file_db_data_2024_10_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -644,7 +644,7 @@ func (x *FetchResponse) String() string { func (*FetchResponse) ProtoMessage() {} func (x *FetchResponse) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[9] + mi := &file_db_data_2024_10_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -657,7 +657,7 @@ func (x *FetchResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use FetchResponse.ProtoReflect.Descriptor instead. 
func (*FetchResponse) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{9} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{9} } func (x *FetchResponse) GetVectors() map[string]*Vector { @@ -699,7 +699,7 @@ type ListRequest struct { func (x *ListRequest) Reset() { *x = ListRequest{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[10] + mi := &file_db_data_2024_10_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -712,7 +712,7 @@ func (x *ListRequest) String() string { func (*ListRequest) ProtoMessage() {} func (x *ListRequest) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[10] + mi := &file_db_data_2024_10_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -725,7 +725,7 @@ func (x *ListRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ListRequest.ProtoReflect.Descriptor instead. 
func (*ListRequest) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{10} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{10} } func (x *ListRequest) GetPrefix() string { @@ -767,7 +767,7 @@ type Pagination struct { func (x *Pagination) Reset() { *x = Pagination{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[11] + mi := &file_db_data_2024_10_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -780,7 +780,7 @@ func (x *Pagination) String() string { func (*Pagination) ProtoMessage() {} func (x *Pagination) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[11] + mi := &file_db_data_2024_10_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -793,7 +793,7 @@ func (x *Pagination) ProtoReflect() protoreflect.Message { // Deprecated: Use Pagination.ProtoReflect.Descriptor instead. 
func (*Pagination) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{11} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{11} } func (x *Pagination) GetNext() string { @@ -814,7 +814,7 @@ type ListItem struct { func (x *ListItem) Reset() { *x = ListItem{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[12] + mi := &file_db_data_2024_10_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -827,7 +827,7 @@ func (x *ListItem) String() string { func (*ListItem) ProtoMessage() {} func (x *ListItem) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[12] + mi := &file_db_data_2024_10_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -840,7 +840,7 @@ func (x *ListItem) ProtoReflect() protoreflect.Message { // Deprecated: Use ListItem.ProtoReflect.Descriptor instead. 
func (*ListItem) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{12} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{12} } func (x *ListItem) GetId() string { @@ -869,7 +869,7 @@ type ListResponse struct { func (x *ListResponse) Reset() { *x = ListResponse{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[13] + mi := &file_db_data_2024_10_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -882,7 +882,7 @@ func (x *ListResponse) String() string { func (*ListResponse) ProtoMessage() {} func (x *ListResponse) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[13] + mi := &file_db_data_2024_10_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -895,7 +895,7 @@ func (x *ListResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ListResponse.ProtoReflect.Descriptor instead. 
func (*ListResponse) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{13} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{13} } func (x *ListResponse) GetVectors() []*ListItem { @@ -947,7 +947,7 @@ type QueryVector struct { func (x *QueryVector) Reset() { *x = QueryVector{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[14] + mi := &file_db_data_2024_10_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -960,7 +960,7 @@ func (x *QueryVector) String() string { func (*QueryVector) ProtoMessage() {} func (x *QueryVector) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[14] + mi := &file_db_data_2024_10_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -973,7 +973,7 @@ func (x *QueryVector) ProtoReflect() protoreflect.Message { // Deprecated: Use QueryVector.ProtoReflect.Descriptor instead. func (*QueryVector) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{14} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{14} } func (x *QueryVector) GetValues() []float32 { @@ -1029,7 +1029,7 @@ type QueryRequest struct { IncludeMetadata bool `protobuf:"varint,5,opt,name=include_metadata,json=includeMetadata,proto3" json:"include_metadata,omitempty"` // DEPRECATED. The query vectors. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`. // - // Deprecated: Marked as deprecated in data_2024-07.proto. + // Deprecated: Marked as deprecated in db_data_2024-10.proto. Queries []*QueryVector `protobuf:"bytes,6,rep,name=queries,proto3" json:"queries,omitempty"` // The query vector. This should be the same length as the dimension of the index being queried. Each `query()` request can contain only one of the parameters `id` or `vector`. 
Vector []float32 `protobuf:"fixed32,7,rep,packed,name=vector,proto3" json:"vector,omitempty"` @@ -1042,7 +1042,7 @@ type QueryRequest struct { func (x *QueryRequest) Reset() { *x = QueryRequest{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[15] + mi := &file_db_data_2024_10_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1055,7 +1055,7 @@ func (x *QueryRequest) String() string { func (*QueryRequest) ProtoMessage() {} func (x *QueryRequest) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[15] + mi := &file_db_data_2024_10_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1068,7 +1068,7 @@ func (x *QueryRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use QueryRequest.ProtoReflect.Descriptor instead. func (*QueryRequest) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{15} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{15} } func (x *QueryRequest) GetNamespace() string { @@ -1106,7 +1106,7 @@ func (x *QueryRequest) GetIncludeMetadata() bool { return false } -// Deprecated: Marked as deprecated in data_2024-07.proto. +// Deprecated: Marked as deprecated in db_data_2024-10.proto. 
func (x *QueryRequest) GetQueries() []*QueryVector { if x != nil { return x.Queries @@ -1150,7 +1150,7 @@ type SingleQueryResults struct { func (x *SingleQueryResults) Reset() { *x = SingleQueryResults{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[16] + mi := &file_db_data_2024_10_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1163,7 +1163,7 @@ func (x *SingleQueryResults) String() string { func (*SingleQueryResults) ProtoMessage() {} func (x *SingleQueryResults) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[16] + mi := &file_db_data_2024_10_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1176,7 +1176,7 @@ func (x *SingleQueryResults) ProtoReflect() protoreflect.Message { // Deprecated: Use SingleQueryResults.ProtoReflect.Descriptor instead. func (*SingleQueryResults) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{16} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{16} } func (x *SingleQueryResults) GetMatches() []*ScoredVector { @@ -1201,7 +1201,7 @@ type QueryResponse struct { // DEPRECATED. The results of each query. The order is the same as `QueryRequest.queries`. // - // Deprecated: Marked as deprecated in data_2024-07.proto. + // Deprecated: Marked as deprecated in db_data_2024-10.proto. Results []*SingleQueryResults `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` // The matches for the vectors. 
Matches []*ScoredVector `protobuf:"bytes,2,rep,name=matches,proto3" json:"matches,omitempty"` @@ -1214,7 +1214,7 @@ type QueryResponse struct { func (x *QueryResponse) Reset() { *x = QueryResponse{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[17] + mi := &file_db_data_2024_10_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1227,7 +1227,7 @@ func (x *QueryResponse) String() string { func (*QueryResponse) ProtoMessage() {} func (x *QueryResponse) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[17] + mi := &file_db_data_2024_10_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1240,10 +1240,10 @@ func (x *QueryResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use QueryResponse.ProtoReflect.Descriptor instead. func (*QueryResponse) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{17} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{17} } -// Deprecated: Marked as deprecated in data_2024-07.proto. +// Deprecated: Marked as deprecated in db_data_2024-10.proto. 
func (x *QueryResponse) GetResults() []*SingleQueryResults { if x != nil { return x.Results @@ -1284,7 +1284,7 @@ type Usage struct { func (x *Usage) Reset() { *x = Usage{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[18] + mi := &file_db_data_2024_10_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1297,7 +1297,7 @@ func (x *Usage) String() string { func (*Usage) ProtoMessage() {} func (x *Usage) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[18] + mi := &file_db_data_2024_10_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1310,7 +1310,7 @@ func (x *Usage) ProtoReflect() protoreflect.Message { // Deprecated: Use Usage.ProtoReflect.Descriptor instead. func (*Usage) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{18} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{18} } func (x *Usage) GetReadUnits() uint32 { @@ -1340,7 +1340,7 @@ type UpdateRequest struct { func (x *UpdateRequest) Reset() { *x = UpdateRequest{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[19] + mi := &file_db_data_2024_10_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1353,7 +1353,7 @@ func (x *UpdateRequest) String() string { func (*UpdateRequest) ProtoMessage() {} func (x *UpdateRequest) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[19] + mi := &file_db_data_2024_10_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1366,7 +1366,7 @@ func (x *UpdateRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdateRequest.ProtoReflect.Descriptor instead. 
func (*UpdateRequest) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{19} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{19} } func (x *UpdateRequest) GetId() string { @@ -1414,7 +1414,7 @@ type UpdateResponse struct { func (x *UpdateResponse) Reset() { *x = UpdateResponse{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[20] + mi := &file_db_data_2024_10_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1427,7 +1427,7 @@ func (x *UpdateResponse) String() string { func (*UpdateResponse) ProtoMessage() {} func (x *UpdateResponse) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[20] + mi := &file_db_data_2024_10_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1440,7 +1440,7 @@ func (x *UpdateResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdateResponse.ProtoReflect.Descriptor instead. func (*UpdateResponse) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{20} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{20} } // The request for the `describe_index_stats` operation. 
@@ -1458,7 +1458,7 @@ type DescribeIndexStatsRequest struct { func (x *DescribeIndexStatsRequest) Reset() { *x = DescribeIndexStatsRequest{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[21] + mi := &file_db_data_2024_10_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1471,7 +1471,7 @@ func (x *DescribeIndexStatsRequest) String() string { func (*DescribeIndexStatsRequest) ProtoMessage() {} func (x *DescribeIndexStatsRequest) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[21] + mi := &file_db_data_2024_10_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1484,7 +1484,7 @@ func (x *DescribeIndexStatsRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use DescribeIndexStatsRequest.ProtoReflect.Descriptor instead. func (*DescribeIndexStatsRequest) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{21} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{21} } func (x *DescribeIndexStatsRequest) GetFilter() *structpb.Struct { @@ -1508,7 +1508,7 @@ type NamespaceSummary struct { func (x *NamespaceSummary) Reset() { *x = NamespaceSummary{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[22] + mi := &file_db_data_2024_10_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1521,7 +1521,7 @@ func (x *NamespaceSummary) String() string { func (*NamespaceSummary) ProtoMessage() {} func (x *NamespaceSummary) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[22] + mi := &file_db_data_2024_10_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1534,7 +1534,7 @@ func (x *NamespaceSummary) ProtoReflect() 
protoreflect.Message { // Deprecated: Use NamespaceSummary.ProtoReflect.Descriptor instead. func (*NamespaceSummary) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{22} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{22} } func (x *NamespaceSummary) GetVectorCount() uint32 { @@ -1569,7 +1569,7 @@ type DescribeIndexStatsResponse struct { func (x *DescribeIndexStatsResponse) Reset() { *x = DescribeIndexStatsResponse{} if protoimpl.UnsafeEnabled { - mi := &file_data_2024_07_proto_msgTypes[23] + mi := &file_db_data_2024_10_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1582,7 +1582,7 @@ func (x *DescribeIndexStatsResponse) String() string { func (*DescribeIndexStatsResponse) ProtoMessage() {} func (x *DescribeIndexStatsResponse) ProtoReflect() protoreflect.Message { - mi := &file_data_2024_07_proto_msgTypes[23] + mi := &file_db_data_2024_10_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1595,7 +1595,7 @@ func (x *DescribeIndexStatsResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use DescribeIndexStatsResponse.ProtoReflect.Descriptor instead. 
func (*DescribeIndexStatsResponse) Descriptor() ([]byte, []int) { - return file_data_2024_07_proto_rawDescGZIP(), []int{23} + return file_db_data_2024_10_proto_rawDescGZIP(), []int{23} } func (x *DescribeIndexStatsResponse) GetNamespaces() map[string]*NamespaceSummary { @@ -1626,272 +1626,273 @@ func (x *DescribeIndexStatsResponse) GetTotalVectorCount() uint32 { return 0 } -var File_data_2024_07_proto protoreflect.FileDescriptor - -var file_data_2024_07_proto_rawDesc = []byte{ - 0x0a, 0x12, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x32, 0x30, 0x32, 0x34, 0x2d, 0x30, 0x37, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, - 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65, - 0x6c, 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x22, 0x4a, 0x0a, 0x0c, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x12, 0x1d, 0x0a, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0d, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, +var File_db_data_2024_10_proto protoreflect.FileDescriptor + +var file_db_data_2024_10_proto_rawDesc = []byte{ + 0x0a, 0x15, 0x64, 0x62, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x32, 0x30, 0x32, 0x34, 0x2d, 0x31, + 0x30, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, + 0x69, 0x2f, 0x61, 0x6e, 0x6e, 
0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, + 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x4a, 0x0a, 0x0c, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x12, 0x1d, 0x0a, 0x07, 0x69, 0x6e, 0x64, 0x69, 0x63, 0x65, 0x73, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0d, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x07, 0x69, 0x6e, 0x64, 0x69, + 0x63, 0x65, 0x73, 0x12, 0x1b, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x02, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, + 0x22, 0xa3, 0x01, 0x0a, 0x06, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x13, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1b, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x02, - 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0xa3, 0x01, - 0x0a, 0x06, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1b, 0x0a, - 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x02, 0x42, 0x03, 0xe0, - 0x41, 0x02, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x32, 0x0a, 0x0d, 0x73, 0x70, - 0x61, 0x72, 0x73, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x32, 0x0a, + 0x0d, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x52, 0x0c, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 
0x61, 0x6c, 0x75, 0x65, + 0x73, 0x12, 0x33, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x08, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0xba, 0x01, 0x0a, 0x0c, 0x53, 0x63, 0x6f, 0x72, 0x65, + 0x64, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, + 0x73, 0x63, 0x6f, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x52, 0x05, 0x73, 0x63, 0x6f, + 0x72, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, + 0x28, 0x02, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x32, 0x0a, 0x0d, 0x73, 0x70, + 0x61, 0x72, 0x73, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, 0x0c, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x33, - 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x22, 0xba, 0x01, 0x0a, 0x0c, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x64, 0x56, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x12, 0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, - 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x12, - 0x16, 0x0a, 0x06, 
0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x02, 0x52, - 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x32, 0x0a, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x73, - 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, - 0x2e, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, 0x0c, 0x73, - 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x33, 0x0a, 0x08, 0x6d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, + 0x61, 0x74, 0x61, 0x22, 0xa1, 0x01, 0x0a, 0x0c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x55, + 0x6e, 0x69, 0x6f, 0x6e, 0x12, 0x28, 0x0a, 0x06, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, 0x12, 0x28, + 0x0a, 0x06, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, + 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, + 0x52, 0x06, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x28, 0x0a, 0x06, 0x75, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x75, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x42, 0x13, 0x0a, 0x11, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x55, 0x6e, 0x69, + 0x6f, 0x6e, 0x49, 0x6e, 0x6e, 0x65, 0x72, 0x22, 0x55, 0x0a, 0x0d, 0x55, 0x70, 0x73, 0x65, 0x72, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x07, 0x76, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x07, 0x2e, 0x56, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, + 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 
0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x37, + 0x0a, 0x0e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x25, 0x0a, 0x0e, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x63, 0x6f, 0x75, + 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, + 0x65, 0x64, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x8f, 0x01, 0x0a, 0x0d, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x69, 0x64, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x03, 0x69, 0x64, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x64, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x5f, 0x61, 0x6c, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x09, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x41, 0x6c, 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, + 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, + 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, + 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, + 0x74, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0x10, 0x0a, 0x0e, 0x44, 0x65, 0x6c, + 0x65, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x43, 0x0a, 0x0c, 0x46, + 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x03, 0x69, + 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x03, 0x69, + 0x64, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x22, 0xd6, 0x01, 0x0a, 0x0d, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 
0x6e, + 0x73, 0x65, 0x12, 0x35, 0x0a, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x52, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, + 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, + 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x21, 0x0a, 0x05, 0x75, 0x73, 0x61, 0x67, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x48, 0x00, + 0x52, 0x05, 0x75, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x1a, 0x43, 0x0a, 0x0c, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x1d, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x07, 0x2e, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, + 0x08, 0x0a, 0x06, 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x22, 0xbd, 0x01, 0x0a, 0x0b, 0x4c, 0x69, + 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x06, 0x70, 0x72, 0x65, + 0x66, 0x69, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x70, 0x72, 0x65, + 0x66, 0x69, 0x78, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0d, 0x48, 0x01, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x88, 0x01, + 0x01, 0x12, 0x2e, 0x0a, 0x10, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0f, 0x70, + 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x88, 0x01, + 0x01, 0x12, 0x1c, 0x0a, 0x09, 
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, + 0x09, 0x0a, 0x07, 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x6c, + 0x69, 0x6d, 0x69, 0x74, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x20, 0x0a, 0x0a, 0x50, 0x61, 0x67, + 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x65, 0x78, 0x74, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x65, 0x78, 0x74, 0x22, 0x1a, 0x0a, 0x08, 0x4c, + 0x69, 0x73, 0x74, 0x49, 0x74, 0x65, 0x6d, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0xbf, 0x01, 0x0a, 0x0c, 0x4c, 0x69, 0x73, 0x74, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x07, 0x76, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x09, 0x2e, 0x4c, 0x69, 0x73, 0x74, + 0x49, 0x74, 0x65, 0x6d, 0x52, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x30, 0x0a, + 0x0a, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x0b, 0x2e, 0x50, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, + 0x52, 0x0a, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, + 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x21, 0x0a, + 0x05, 0x75, 0x73, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x55, + 0x73, 0x61, 0x67, 0x65, 0x48, 0x01, 0x52, 0x05, 0x75, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, + 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, + 0x08, 0x0a, 0x06, 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x22, 0xc2, 
0x01, 0x0a, 0x0b, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x1b, 0x0a, 0x06, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x02, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x06, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x32, 0x0a, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, + 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, + 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, 0x0c, 0x73, 0x70, + 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x13, 0x0a, 0x05, 0x74, 0x6f, + 0x70, 0x5f, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, 0x74, 0x6f, 0x70, 0x4b, 0x12, + 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x2f, 0x0a, + 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x22, 0xa1, 0x01, 0x0a, 0x0c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x55, 0x6e, 0x69, 0x6f, - 0x6e, 0x12, 0x28, 0x0a, 0x06, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x0e, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x48, 0x00, 0x52, 0x06, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, 0x12, 0x28, 0x0a, 0x06, 0x64, - 0x65, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x44, 0x65, - 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x64, - 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x28, 0x0a, 0x06, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, - 0x71, 
0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x42, - 0x13, 0x0a, 0x11, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x55, 0x6e, 0x69, 0x6f, 0x6e, 0x49, - 0x6e, 0x6e, 0x65, 0x72, 0x22, 0x55, 0x0a, 0x0d, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x07, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x42, - 0x03, 0xe0, 0x41, 0x02, 0x52, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x1c, 0x0a, - 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x37, 0x0a, 0x0e, 0x55, - 0x70, 0x73, 0x65, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x25, 0x0a, - 0x0e, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, 0x65, 0x64, 0x43, - 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x8f, 0x01, 0x0a, 0x0d, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x69, 0x64, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x03, 0x69, 0x64, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x65, 0x6c, 0x65, - 0x74, 0x65, 0x5f, 0x61, 0x6c, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x64, 0x65, - 0x6c, 0x65, 0x74, 0x65, 0x41, 0x6c, 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x06, - 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 
0x10, 0x0a, 0x0e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x43, 0x0a, 0x0c, 0x46, 0x65, 0x74, 0x63, - 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x03, 0x69, 0x64, 0x73, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x03, 0x69, 0x64, 0x73, 0x12, - 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0xd6, 0x01, - 0x0a, 0x0d, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x35, 0x0a, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1b, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x76, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x12, 0x21, 0x0a, 0x05, 0x75, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x48, 0x00, 0x52, 0x05, 0x75, - 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x1a, 0x43, 0x0a, 0x0c, 0x56, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x1d, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x07, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x08, 0x0a, 0x06, - 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x22, 0xbd, 0x01, 0x0a, 0x0b, 0x4c, 0x69, 0x73, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x06, 0x70, 0x72, 0x65, 
0x66, 0x69, 0x78, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, - 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0d, 0x48, 0x01, 0x52, 0x05, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x88, 0x01, 0x01, 0x12, 0x2e, - 0x0a, 0x10, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x6f, 0x6b, - 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x0f, 0x70, 0x61, 0x67, 0x69, - 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x1c, - 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x09, 0x0a, 0x07, - 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x6c, 0x69, 0x6d, 0x69, - 0x74, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x20, 0x0a, 0x0a, 0x50, 0x61, 0x67, 0x69, 0x6e, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x6e, 0x65, 0x78, 0x74, 0x22, 0x1a, 0x0a, 0x08, 0x4c, 0x69, 0x73, 0x74, - 0x49, 0x74, 0x65, 0x6d, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x02, 0x69, 0x64, 0x22, 0xbf, 0x01, 0x0a, 0x0c, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x23, 0x0a, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x09, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x74, 0x65, - 0x6d, 0x52, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x30, 0x0a, 0x0a, 0x70, 0x61, - 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, - 0x2e, 0x50, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0a, 0x70, - 0x61, 0x67, 0x69, 
0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x1c, 0x0a, 0x09, - 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x21, 0x0a, 0x05, 0x75, 0x73, - 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x55, 0x73, 0x61, 0x67, - 0x65, 0x48, 0x01, 0x52, 0x05, 0x75, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0d, 0x0a, - 0x0b, 0x5f, 0x70, 0x61, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x08, 0x0a, 0x06, - 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x22, 0xc2, 0x01, 0x0a, 0x0b, 0x51, 0x75, 0x65, 0x72, 0x79, - 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x1b, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x02, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x06, 0x76, 0x61, 0x6c, + 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0xd1, + 0x02, 0x0a, 0x0c, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x18, 0x0a, + 0x05, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x42, 0x03, 0xe0, 0x41, + 0x02, 0x52, 0x04, 0x74, 0x6f, 0x70, 0x4b, 0x12, 0x2f, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, + 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, + 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x25, 0x0a, 0x0e, 0x69, 0x6e, 0x63, 0x6c, + 0x75, 0x64, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0d, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, + 0x29, 0x0a, 0x10, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 
0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x69, 0x6e, 0x63, 0x6c, 0x75, + 0x64, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2a, 0x0a, 0x07, 0x71, 0x75, + 0x65, 0x72, 0x69, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x42, 0x02, 0x18, 0x01, 0x52, 0x07, 0x71, + 0x75, 0x65, 0x72, 0x69, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x18, 0x07, 0x20, 0x03, 0x28, 0x02, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x32, + 0x0a, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, + 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x52, 0x0c, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, + 0x69, 0x64, 0x22, 0x5b, 0x0a, 0x12, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x51, 0x75, 0x65, 0x72, + 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x12, 0x27, 0x0a, 0x07, 0x6d, 0x61, 0x74, 0x63, + 0x68, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x53, 0x63, 0x6f, 0x72, + 0x65, 0x64, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x07, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, + 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, + 0xb6, 0x01, 0x0a, 0x0d, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x31, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x51, 0x75, 0x65, 0x72, 0x79, + 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x42, 0x02, 0x18, 0x01, 0x52, 0x07, 0x72, 0x65, 
0x73, + 0x75, 0x6c, 0x74, 0x73, 0x12, 0x27, 0x0a, 0x07, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x64, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x52, 0x07, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, 0x1c, 0x0a, + 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x21, 0x0a, 0x05, 0x75, + 0x73, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x55, 0x73, 0x61, + 0x67, 0x65, 0x48, 0x00, 0x52, 0x05, 0x75, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x42, 0x08, + 0x0a, 0x06, 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x0a, 0x05, 0x55, 0x73, 0x61, 0x67, + 0x65, 0x12, 0x22, 0x0a, 0x0a, 0x72, 0x65, 0x61, 0x64, 0x5f, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0d, 0x48, 0x00, 0x52, 0x09, 0x72, 0x65, 0x61, 0x64, 0x55, 0x6e, 0x69, + 0x74, 0x73, 0x88, 0x01, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x72, 0x65, 0x61, 0x64, 0x5f, 0x75, + 0x6e, 0x69, 0x74, 0x73, 0x22, 0xca, 0x01, 0x0a, 0x0d, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x02, 0x69, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x02, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x32, 0x0a, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, 0x0c, 0x73, 0x70, 0x61, 0x72, 0x73, - 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x13, 0x0a, 0x05, 0x74, 0x6f, 0x70, 0x5f, 0x6b, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, 0x74, 0x6f, 0x70, 0x4b, 0x12, 0x1c, 0x0a, 0x09, - 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 
0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x66, 0x69, - 0x6c, 0x74, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, - 0x75, 0x63, 0x74, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0xd1, 0x02, 0x0a, 0x0c, - 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, - 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x05, 0x74, 0x6f, - 0x70, 0x5f, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x04, - 0x74, 0x6f, 0x70, 0x4b, 0x12, 0x2f, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x06, 0x66, - 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x25, 0x0a, 0x0e, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, - 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0d, 0x69, - 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x29, 0x0a, 0x10, - 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2a, 0x0a, 0x07, 0x71, 0x75, 0x65, 0x72, 0x69, - 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, - 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x42, 0x02, 0x18, 0x01, 0x52, 0x07, 0x71, 0x75, 0x65, 0x72, - 0x69, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 
0x72, 0x18, 0x07, 0x20, - 0x03, 0x28, 0x02, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x32, 0x0a, 0x0d, 0x73, - 0x70, 0x61, 0x72, 0x73, 0x65, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x09, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x52, 0x0c, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, - 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, - 0x5b, 0x0a, 0x12, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, - 0x73, 0x75, 0x6c, 0x74, 0x73, 0x12, 0x27, 0x0a, 0x07, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x64, 0x56, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x07, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, 0x1c, - 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0xb6, 0x01, 0x0a, - 0x0d, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x31, - 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x13, 0x2e, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, - 0x75, 0x6c, 0x74, 0x73, 0x42, 0x02, 0x18, 0x01, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, - 0x73, 0x12, 0x27, 0x0a, 0x07, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x53, 0x63, 0x6f, 0x72, 0x65, 0x64, 0x56, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x52, 0x07, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x65, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, - 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, - 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x21, 0x0a, 0x05, 0x75, 0x73, 0x61, 0x67, - 0x65, 0x18, 
0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x48, - 0x00, 0x52, 0x05, 0x75, 0x73, 0x61, 0x67, 0x65, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, - 0x75, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x0a, 0x05, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x22, - 0x0a, 0x0a, 0x72, 0x65, 0x61, 0x64, 0x5f, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0d, 0x48, 0x00, 0x52, 0x09, 0x72, 0x65, 0x61, 0x64, 0x55, 0x6e, 0x69, 0x74, 0x73, 0x88, - 0x01, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x72, 0x65, 0x61, 0x64, 0x5f, 0x75, 0x6e, 0x69, 0x74, - 0x73, 0x22, 0xca, 0x01, 0x0a, 0x0d, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, - 0x03, 0xe0, 0x41, 0x02, 0x52, 0x02, 0x69, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x02, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x12, 0x32, 0x0a, 0x0d, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x53, 0x70, 0x61, 0x72, 0x73, 0x65, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, 0x0c, 0x73, 0x70, 0x61, 0x72, 0x73, 0x65, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0c, 0x73, 0x65, 0x74, 0x5f, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, - 0x75, 0x63, 0x74, 0x52, 0x0b, 0x73, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x10, - 0x0a, 0x0e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x4c, 0x0a, 0x19, 0x44, 0x65, 0x73, 0x63, 
0x72, 0x69, 0x62, 0x65, 0x49, 0x6e, 0x64, 0x65, - 0x78, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, - 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, + 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0c, 0x73, 0x65, 0x74, 0x5f, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x22, 0x35, - 0x0a, 0x10, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x75, 0x6d, 0x6d, 0x61, - 0x72, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x5f, 0x63, 0x6f, 0x75, - 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0b, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xae, 0x02, 0x0a, 0x1a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, - 0x62, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4b, 0x0a, 0x0a, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x44, 0x65, 0x73, 0x63, 0x72, - 0x69, 0x62, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0d, 0x52, 0x09, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, - 0x25, 0x0a, 0x0e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x66, 0x75, 0x6c, 0x6c, 0x6e, 0x65, 0x73, - 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0d, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x46, 
0x75, - 0x6c, 0x6c, 0x6e, 0x65, 0x73, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, - 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x0d, 0x52, 0x10, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x43, - 0x6f, 0x75, 0x6e, 0x74, 0x1a, 0x50, 0x0a, 0x0f, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x32, 0xb9, 0x04, 0x0a, 0x0d, 0x56, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x45, 0x0a, 0x06, 0x55, 0x70, 0x73, 0x65, - 0x72, 0x74, 0x12, 0x0e, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x1a, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x3a, 0x01, 0x2a, 0x22, 0x0f, - 0x2f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x75, 0x70, 0x73, 0x65, 0x72, 0x74, 0x12, - 0x58, 0x0a, 0x06, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x0e, 0x2e, 0x44, 0x65, 0x6c, 0x65, - 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x44, 0x65, 0x6c, 0x65, - 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2d, 0x82, 0xd3, 0xe4, 0x93, - 0x02, 0x27, 0x3a, 0x01, 0x2a, 0x5a, 0x11, 0x2a, 0x0f, 0x2f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x73, 0x2f, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x22, 0x0f, 0x2f, 0x76, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x73, 0x2f, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x3e, 0x0a, 0x05, 0x46, 0x65, 0x74, - 0x63, 0x68, 0x12, 0x0d, 0x2e, 
0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x0e, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x16, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x10, 0x12, 0x0e, 0x2f, 0x76, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x73, 0x2f, 0x66, 0x65, 0x74, 0x63, 0x68, 0x12, 0x3a, 0x0a, 0x04, 0x4c, 0x69, 0x73, - 0x74, 0x12, 0x0c, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x0d, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x15, - 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x0f, 0x12, 0x0d, 0x2f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, - 0x2f, 0x6c, 0x69, 0x73, 0x74, 0x12, 0x39, 0x0a, 0x05, 0x51, 0x75, 0x65, 0x72, 0x79, 0x12, 0x0d, - 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0e, 0x2e, - 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x11, 0x82, - 0xd3, 0xe4, 0x93, 0x02, 0x0b, 0x3a, 0x01, 0x2a, 0x22, 0x06, 0x2f, 0x71, 0x75, 0x65, 0x72, 0x79, - 0x12, 0x45, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x0e, 0x2e, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1a, 0x82, 0xd3, 0xe4, - 0x93, 0x02, 0x14, 0x3a, 0x01, 0x2a, 0x22, 0x0f, 0x2f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, - 0x2f, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x88, 0x01, 0x0a, 0x12, 0x44, 0x65, 0x73, 0x63, - 0x72, 0x69, 0x62, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x1a, - 0x2e, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x53, 0x74, - 0x61, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x44, 0x65, 0x73, + 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x0b, 0x73, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 
0x73, 0x70, 0x61, 0x63, 0x65, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x22, 0x10, 0x0a, 0x0e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x4c, 0x0a, 0x19, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x49, + 0x6e, 0x64, 0x65, 0x78, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x2f, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, + 0x72, 0x22, 0x35, 0x0a, 0x10, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x75, + 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x5f, + 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0b, 0x76, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xae, 0x02, 0x0a, 0x1a, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x39, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x33, 0x3a, - 0x01, 0x2a, 0x5a, 0x17, 0x12, 0x15, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x5f, - 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x22, 0x15, 0x2f, 0x64, 0x65, - 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x73, 0x74, 0x61, - 0x74, 0x73, 0x42, 0x4b, 0x0a, 0x11, 0x69, 0x6f, 0x2e, 0x70, 0x69, 0x6e, 0x65, 0x63, 0x6f, 0x6e, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x69, 0x6e, 0x65, 0x63, 0x6f, 0x6e, 0x65, 0x2d, 0x69, - 0x6f, 0x2f, 0x67, 0x6f, 0x2d, 0x70, 0x69, 0x6e, 0x65, 0x63, 0x6f, 0x6e, 0x65, 0x2f, 0x69, 0x6e, - 0x74, 
0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x64, 0x61, 0x74, 0x61, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4b, 0x0a, 0x0a, 0x6e, 0x61, 0x6d, 0x65, 0x73, + 0x70, 0x61, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x44, 0x65, + 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x53, 0x74, 0x61, 0x74, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, + 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x09, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x66, 0x75, 0x6c, 0x6c, + 0x6e, 0x65, 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0d, 0x69, 0x6e, 0x64, 0x65, + 0x78, 0x46, 0x75, 0x6c, 0x6c, 0x6e, 0x65, 0x73, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x74, 0x6f, 0x74, + 0x61, 0x6c, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x10, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x56, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x1a, 0x50, 0x0a, 0x0f, 0x4e, 0x61, 0x6d, 0x65, 0x73, + 0x70, 0x61, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x4e, 0x61, + 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x32, 0xb9, 0x04, 0x0a, 0x0d, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x45, 0x0a, 0x06, 0x55, + 
0x70, 0x73, 0x65, 0x72, 0x74, 0x12, 0x0e, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1a, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x3a, 0x01, + 0x2a, 0x22, 0x0f, 0x2f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x75, 0x70, 0x73, 0x65, + 0x72, 0x74, 0x12, 0x58, 0x0a, 0x06, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x0e, 0x2e, 0x44, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x44, + 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2d, 0x82, + 0xd3, 0xe4, 0x93, 0x02, 0x27, 0x3a, 0x01, 0x2a, 0x5a, 0x11, 0x2a, 0x0f, 0x2f, 0x76, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x22, 0x0f, 0x2f, 0x76, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x3e, 0x0a, 0x05, + 0x46, 0x65, 0x74, 0x63, 0x68, 0x12, 0x0d, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0e, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x16, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x10, 0x12, 0x0e, 0x2f, 0x76, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x66, 0x65, 0x74, 0x63, 0x68, 0x12, 0x3a, 0x0a, 0x04, + 0x4c, 0x69, 0x73, 0x74, 0x12, 0x0c, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x0d, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x15, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x0f, 0x12, 0x0d, 0x2f, 0x76, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x73, 0x2f, 0x6c, 0x69, 0x73, 0x74, 0x12, 0x39, 0x0a, 0x05, 0x51, 0x75, 0x65, 0x72, + 0x79, 0x12, 0x0d, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x0e, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x11, 0x82, 0xd3, 0xe4, 0x93, 
0x02, 0x0b, 0x3a, 0x01, 0x2a, 0x22, 0x06, 0x2f, 0x71, 0x75, + 0x65, 0x72, 0x79, 0x12, 0x45, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x0e, 0x2e, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1a, + 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x3a, 0x01, 0x2a, 0x22, 0x0f, 0x2f, 0x76, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x73, 0x2f, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x88, 0x01, 0x0a, 0x12, 0x44, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x53, 0x74, 0x61, 0x74, + 0x73, 0x12, 0x1a, 0x2e, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x49, 0x6e, 0x64, 0x65, + 0x78, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, + 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x53, 0x74, 0x61, + 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x39, 0x82, 0xd3, 0xe4, 0x93, + 0x02, 0x33, 0x3a, 0x01, 0x2a, 0x5a, 0x17, 0x12, 0x15, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, + 0x62, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x22, 0x15, + 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, + 0x73, 0x74, 0x61, 0x74, 0x73, 0x42, 0x53, 0x0a, 0x11, 0x69, 0x6f, 0x2e, 0x70, 0x69, 0x6e, 0x65, + 0x63, 0x6f, 0x6e, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3c, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x69, 0x6e, 0x65, 0x63, 0x6f, 0x6e, + 0x65, 0x2d, 0x69, 0x6f, 0x2f, 0x67, 0x6f, 0x2d, 0x70, 0x69, 0x6e, 0x65, 0x63, 0x6f, 0x6e, 0x65, + 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x64, 0x62, + 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, } var ( - file_data_2024_07_proto_rawDescOnce sync.Once - 
file_data_2024_07_proto_rawDescData = file_data_2024_07_proto_rawDesc + file_db_data_2024_10_proto_rawDescOnce sync.Once + file_db_data_2024_10_proto_rawDescData = file_db_data_2024_10_proto_rawDesc ) -func file_data_2024_07_proto_rawDescGZIP() []byte { - file_data_2024_07_proto_rawDescOnce.Do(func() { - file_data_2024_07_proto_rawDescData = protoimpl.X.CompressGZIP(file_data_2024_07_proto_rawDescData) +func file_db_data_2024_10_proto_rawDescGZIP() []byte { + file_db_data_2024_10_proto_rawDescOnce.Do(func() { + file_db_data_2024_10_proto_rawDescData = protoimpl.X.CompressGZIP(file_db_data_2024_10_proto_rawDescData) }) - return file_data_2024_07_proto_rawDescData + return file_db_data_2024_10_proto_rawDescData } -var file_data_2024_07_proto_msgTypes = make([]protoimpl.MessageInfo, 26) -var file_data_2024_07_proto_goTypes = []interface{}{ +var file_db_data_2024_10_proto_msgTypes = make([]protoimpl.MessageInfo, 26) +var file_db_data_2024_10_proto_goTypes = []interface{}{ (*SparseValues)(nil), // 0: SparseValues (*Vector)(nil), // 1: Vector (*ScoredVector)(nil), // 2: ScoredVector @@ -1920,7 +1921,7 @@ var file_data_2024_07_proto_goTypes = []interface{}{ nil, // 25: DescribeIndexStatsResponse.NamespacesEntry (*structpb.Struct)(nil), // 26: google.protobuf.Struct } -var file_data_2024_07_proto_depIdxs = []int32{ +var file_db_data_2024_10_proto_depIdxs = []int32{ 0, // 0: Vector.sparse_values:type_name -> SparseValues 26, // 1: Vector.metadata:type_name -> google.protobuf.Struct 0, // 2: ScoredVector.sparse_values:type_name -> SparseValues @@ -1971,13 +1972,13 @@ var file_data_2024_07_proto_depIdxs = []int32{ 0, // [0:29] is the sub-list for field type_name } -func init() { file_data_2024_07_proto_init() } -func file_data_2024_07_proto_init() { - if File_data_2024_07_proto != nil { +func init() { file_db_data_2024_10_proto_init() } +func file_db_data_2024_10_proto_init() { + if File_db_data_2024_10_proto != nil { return } if !protoimpl.UnsafeEnabled { - 
file_data_2024_07_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SparseValues); i { case 0: return &v.state @@ -1989,7 +1990,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Vector); i { case 0: return &v.state @@ -2001,7 +2002,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ScoredVector); i { case 0: return &v.state @@ -2013,7 +2014,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RequestUnion); i { case 0: return &v.state @@ -2025,7 +2026,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*UpsertRequest); i { case 0: return &v.state @@ -2037,7 +2038,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*UpsertResponse); i { case 0: return &v.state @@ -2049,7 +2050,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + 
file_db_data_2024_10_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DeleteRequest); i { case 0: return &v.state @@ -2061,7 +2062,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DeleteResponse); i { case 0: return &v.state @@ -2073,7 +2074,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchRequest); i { case 0: return &v.state @@ -2085,7 +2086,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchResponse); i { case 0: return &v.state @@ -2097,7 +2098,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ListRequest); i { case 0: return &v.state @@ -2109,7 +2110,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Pagination); i { case 0: return &v.state @@ -2121,7 +2122,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { 
switch v := v.(*ListItem); i { case 0: return &v.state @@ -2133,7 +2134,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ListResponse); i { case 0: return &v.state @@ -2145,7 +2146,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*QueryVector); i { case 0: return &v.state @@ -2157,7 +2158,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*QueryRequest); i { case 0: return &v.state @@ -2169,7 +2170,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SingleQueryResults); i { case 0: return &v.state @@ -2181,7 +2182,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*QueryResponse); i { case 0: return &v.state @@ -2193,7 +2194,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Usage); i { case 0: return &v.state @@ -2205,7 +2206,7 @@ func 
file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*UpdateRequest); i { case 0: return &v.state @@ -2217,7 +2218,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*UpdateResponse); i { case 0: return &v.state @@ -2229,7 +2230,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DescribeIndexStatsRequest); i { case 0: return &v.state @@ -2241,7 +2242,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*NamespaceSummary); i { case 0: return &v.state @@ -2253,7 +2254,7 @@ func file_data_2024_07_proto_init() { return nil } } - file_data_2024_07_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + file_db_data_2024_10_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*DescribeIndexStatsResponse); i { case 0: return &v.state @@ -2266,32 +2267,32 @@ func file_data_2024_07_proto_init() { } } } - file_data_2024_07_proto_msgTypes[3].OneofWrappers = []interface{}{ + file_db_data_2024_10_proto_msgTypes[3].OneofWrappers = []interface{}{ (*RequestUnion_Upsert)(nil), (*RequestUnion_Delete)(nil), (*RequestUnion_Update)(nil), } - file_data_2024_07_proto_msgTypes[9].OneofWrappers = []interface{}{} - 
file_data_2024_07_proto_msgTypes[10].OneofWrappers = []interface{}{} - file_data_2024_07_proto_msgTypes[13].OneofWrappers = []interface{}{} - file_data_2024_07_proto_msgTypes[17].OneofWrappers = []interface{}{} - file_data_2024_07_proto_msgTypes[18].OneofWrappers = []interface{}{} + file_db_data_2024_10_proto_msgTypes[9].OneofWrappers = []interface{}{} + file_db_data_2024_10_proto_msgTypes[10].OneofWrappers = []interface{}{} + file_db_data_2024_10_proto_msgTypes[13].OneofWrappers = []interface{}{} + file_db_data_2024_10_proto_msgTypes[17].OneofWrappers = []interface{}{} + file_db_data_2024_10_proto_msgTypes[18].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_data_2024_07_proto_rawDesc, + RawDescriptor: file_db_data_2024_10_proto_rawDesc, NumEnums: 0, NumMessages: 26, NumExtensions: 0, NumServices: 1, }, - GoTypes: file_data_2024_07_proto_goTypes, - DependencyIndexes: file_data_2024_07_proto_depIdxs, - MessageInfos: file_data_2024_07_proto_msgTypes, + GoTypes: file_db_data_2024_10_proto_goTypes, + DependencyIndexes: file_db_data_2024_10_proto_depIdxs, + MessageInfos: file_db_data_2024_10_proto_msgTypes, }.Build() - File_data_2024_07_proto = out.File - file_data_2024_07_proto_rawDesc = nil - file_data_2024_07_proto_goTypes = nil - file_data_2024_07_proto_depIdxs = nil + File_db_data_2024_10_proto = out.File + file_db_data_2024_10_proto_rawDesc = nil + file_db_data_2024_10_proto_goTypes = nil + file_db_data_2024_10_proto_depIdxs = nil } diff --git a/internal/gen/data/data_2024-07_grpc.pb.go b/internal/gen/db_data/grpc/db_data_2024-10_grpc.pb.go similarity index 99% rename from internal/gen/data/data_2024-07_grpc.pb.go rename to internal/gen/db_data/grpc/db_data_2024-10_grpc.pb.go index 96fd799..c296696 100644 --- a/internal/gen/data/data_2024-07_grpc.pb.go +++ b/internal/gen/db_data/grpc/db_data_2024-10_grpc.pb.go @@ -2,9 +2,9 @@ // versions: 
// - protoc-gen-go-grpc v1.3.0 // - protoc v5.27.1 -// source: data_2024-07.proto +// source: db_data_2024-10.proto -package data +package grpc import ( context "context" @@ -409,5 +409,5 @@ var VectorService_ServiceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "data_2024-07.proto", + Metadata: "db_data_2024-10.proto", } diff --git a/internal/gen/db_data/rest/db_data_2024-10.oas.go b/internal/gen/db_data/rest/db_data_2024-10.oas.go new file mode 100644 index 0000000..755b14d --- /dev/null +++ b/internal/gen/db_data/rest/db_data_2024-10.oas.go @@ -0,0 +1,2280 @@ +// Package db_data provides primitives to interact with the openapi HTTP API. +// +// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.3.0 DO NOT EDIT. +package db_data + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" + + "github.com/oapi-codegen/runtime" +) + +const ( + ApiKeyAuthScopes = "ApiKeyAuth.Scopes" +) + +// Defines values for ImportErrorModeOnError. +const ( + Abort ImportErrorModeOnError = "abort" + Continue ImportErrorModeOnError = "continue" +) + +// Defines values for ImportModelStatus. +const ( + Cancelled ImportModelStatus = "Cancelled" + Completed ImportModelStatus = "Completed" + Failed ImportModelStatus = "Failed" + InProgress ImportModelStatus = "InProgress" + Pending ImportModelStatus = "Pending" +) + +// CancelImportResponse The response for the `cancel_import` operation. +type CancelImportResponse = map[string]interface{} + +// DeleteRequest The request for the `delete` operation. +type DeleteRequest struct { + // DeleteAll This indicates that all vectors in the index namespace should be deleted. + DeleteAll *bool `json:"deleteAll,omitempty"` + + // Filter If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. 
See [Filter with metadata](https://docs.pinecone.io/guides/data/filter-with-metadata). + // Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. + Filter *map[string]interface{} `json:"filter,omitempty"` + + // Ids Vectors to delete. + Ids *[]string `json:"ids,omitempty"` + + // Namespace The namespace to delete vectors from, if applicable. + Namespace *string `json:"namespace,omitempty"` +} + +// DeleteResponse The response for the `delete` operation. +type DeleteResponse = map[string]interface{} + +// DescribeIndexStatsRequest The request for the `describe_index_stats` operation. +type DescribeIndexStatsRequest struct { + // Filter If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Filter with metadata](https://docs.pinecone.io/guides/data/filter-with-metadata). + // + // Serverless indexes do not support filtering `describe_index_stats` by metadata. + Filter *map[string]interface{} `json:"filter,omitempty"` +} + +// FetchResponse The response for the `fetch` operation. +type FetchResponse struct { + // Namespace The namespace of the vectors. + Namespace *string `json:"namespace,omitempty"` + Usage *Usage `json:"usage,omitempty"` + Vectors *map[string]Vector `json:"vectors,omitempty"` +} + +// ImportErrorMode Indicates how to respond to errors during the import process. +type ImportErrorMode struct { + // OnError Indicates how to respond to errors during the import process. + OnError *ImportErrorModeOnError `json:"onError,omitempty"` +} + +// ImportErrorModeOnError Indicates how to respond to errors during the import process. +type ImportErrorModeOnError string + +// ImportModel The model for an import operation. +type ImportModel struct { + // CreatedAt The start time of the import operation. 
+ CreatedAt *time.Time `json:"createdAt,omitempty"` + + // Error The error message if the import process failed. + Error *string `json:"error,omitempty"` + + // FinishedAt The end time of the import operation. + FinishedAt *time.Time `json:"finishedAt,omitempty"` + + // Id Unique identifier for the import operation. + Id *string `json:"id,omitempty"` + + // PercentComplete The progress made by the operation out of 100 + PercentComplete *float32 `json:"percentComplete,omitempty"` + + // RecordsImported The number of records successfully imported. + RecordsImported *int64 `json:"recordsImported,omitempty"` + + // Status The status of the operation. + Status *ImportModelStatus `json:"status,omitempty"` + + // Uri The URI from where the data is imported. + Uri *string `json:"uri,omitempty"` +} + +// ImportModelStatus The status of the operation. +type ImportModelStatus string + +// IndexDescription The response for the `describe_index_stats` operation. +type IndexDescription struct { + // Dimension The dimension of the indexed vectors. + Dimension *int64 `json:"dimension,omitempty"` + + // IndexFullness The fullness of the index, regardless of whether a metadata filter expression was passed. The granularity of this metric is 10%. + // + // Serverless indexes scale automatically as needed, so index fullness is relevant only for pod-based indexes. + // + // The index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://docs.pinecone.io/reference/api/control-plane/describe_index). + IndexFullness *float32 `json:"indexFullness,omitempty"` + + // Namespaces A mapping for each namespace in the index from the namespace name to a summary of its contents. If a metadata filter expression is present, the summary will reflect only vectors matching that expression. 
+ Namespaces *map[string]NamespaceSummary `json:"namespaces,omitempty"` + + // TotalVectorCount The total number of vectors in the index, regardless of whether a metadata filter expression was passed + TotalVectorCount *int64 `json:"totalVectorCount,omitempty"` +} + +// ListImportsResponse The response for the `list_imports` operation. +type ListImportsResponse struct { + Data *[]ImportModel `json:"data,omitempty"` + Pagination *Pagination `json:"pagination,omitempty"` +} + +// ListItem defines model for ListItem. +type ListItem struct { + Id *string `json:"id,omitempty"` +} + +// ListResponse The response for the `list` operation. +type ListResponse struct { + // Namespace The namespace of the vectors. + Namespace *string `json:"namespace,omitempty"` + Pagination *Pagination `json:"pagination,omitempty"` + Usage *Usage `json:"usage,omitempty"` + Vectors *[]ListItem `json:"vectors,omitempty"` +} + +// NamespaceSummary A summary of the contents of a namespace. +type NamespaceSummary struct { + // VectorCount The number of vectors stored in this namespace. Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc. + VectorCount *int64 `json:"vectorCount,omitempty"` +} + +// Pagination defines model for Pagination. +type Pagination struct { + Next *string `json:"next,omitempty"` +} + +// QueryRequest The request for the `query` operation. +type QueryRequest struct { + // Filter The filter to apply. You can use vector metadata to limit your search. See [Filter with metadata](https://docs.pinecone.io/guides/data/filter-with-metadata). + Filter *map[string]interface{} `json:"filter,omitempty"` + + // Id The unique ID of the vector to be used as a query vector. Each `query` request can contain only one of the parameters `queries`, `vector`, or `id`. + Id *string `json:"id,omitempty"` + + // IncludeMetadata Indicates whether metadata is included in the response as well as the ids. 
+ IncludeMetadata *bool `json:"includeMetadata,omitempty"` + + // IncludeValues Indicates whether vector values are included in the response. + IncludeValues *bool `json:"includeValues,omitempty"` + + // Namespace The namespace to query. + Namespace *string `json:"namespace,omitempty"` + + // Queries DEPRECATED. The query vectors. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`. + // Deprecated: + Queries *[]QueryVector `json:"queries,omitempty"` + + // SparseVector Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. + SparseVector *SparseValues `json:"sparseVector,omitempty"` + + // TopK The number of results to return for each query. + TopK int64 `json:"topK"` + + // Vector The query vector. This should be the same length as the dimension of the index being queried. Each `query` request can contain only one of the parameters `id` or `vector`. + Vector *[]float32 `json:"vector,omitempty"` +} + +// QueryResponse The response for the `query` operation. These are the matches found for a particular query vector. The matches are ordered from most similar to least similar. +type QueryResponse struct { + // Matches The matches for the vectors. + Matches *[]ScoredVector `json:"matches,omitempty"` + + // Namespace The namespace for the vectors. + Namespace *string `json:"namespace,omitempty"` + + // Results DEPRECATED. The results of each query. The order is the same as `QueryRequest.queries`. + // Deprecated: + Results *[]SingleQueryResults `json:"results,omitempty"` + Usage *Usage `json:"usage,omitempty"` +} + +// QueryVector A single query vector within a `QueryRequest`. +type QueryVector struct { + // Filter An override for the metadata filter to apply. This replaces the request-level filter. + Filter *map[string]interface{} `json:"filter,omitempty"` + + // Namespace An override the namespace to search. 
+ Namespace *string `json:"namespace,omitempty"` + + // SparseValues Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. + SparseValues *SparseValues `json:"sparseValues,omitempty"` + + // TopK An override for the number of results to return for this query vector. + TopK *int64 `json:"topK,omitempty"` + + // Values The query vector values. This should be the same length as the dimension of the index being queried. + Values []float32 `json:"values"` +} + +// ScoredVector defines model for ScoredVector. +type ScoredVector struct { + // Id This is the vector's unique id. + Id string `json:"id"` + + // Metadata This is the metadata, if it is requested. + Metadata *map[string]interface{} `json:"metadata,omitempty"` + + // Score This is a measure of similarity between this vector and the query vector. The higher the score, the more they are similar. + Score *float32 `json:"score,omitempty"` + + // SparseValues Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. + SparseValues *SparseValues `json:"sparseValues,omitempty"` + + // Values This is the vector data, if it is requested. + Values *[]float32 `json:"values,omitempty"` +} + +// SingleQueryResults defines model for SingleQueryResults. +type SingleQueryResults struct { + // Matches The matches for the vectors. + Matches *[]ScoredVector `json:"matches,omitempty"` + + // Namespace The namespace for the vectors. + Namespace *string `json:"namespace,omitempty"` +} + +// SparseValues Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. +type SparseValues struct { + // Indices The indices of the sparse data. + Indices []int64 `json:"indices"` + + // Values The corresponding values of the sparse data, which must be with the same length as the indices. 
+ Values []float32 `json:"values"` +} + +// StartImportRequest The request for the `start_import` operation. +type StartImportRequest struct { + // ErrorMode Indicates how to respond to errors during the import process. + ErrorMode *ImportErrorMode `json:"errorMode,omitempty"` + + // IntegrationId The id of the storage integration that should be used to access the data. + IntegrationId *string `json:"integrationId,omitempty"` + + // Uri The URI prefix under which the data to import is available. All data within this prefix will be listed then imported into the target index. Currently only `s3://` URIs are supported. + Uri string `json:"uri"` +} + +// StartImportResponse The response for the `start_import` operation. +type StartImportResponse struct { + // Id Unique identifier for the import operations. + Id *string `json:"id,omitempty"` +} + +// UpdateRequest The request for the `update` operation. +type UpdateRequest struct { + // Id Vector's unique id. + Id string `json:"id"` + + // Namespace The namespace containing the vector to update. + Namespace *string `json:"namespace,omitempty"` + + // SetMetadata Metadata to set for the vector. + SetMetadata *map[string]interface{} `json:"setMetadata,omitempty"` + + // SparseValues Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. + SparseValues *SparseValues `json:"sparseValues,omitempty"` + + // Values Vector data. + Values *[]float32 `json:"values,omitempty"` +} + +// UpdateResponse The response for the `update` operation. +type UpdateResponse = map[string]interface{} + +// UpsertRequest The request for the `upsert` operation. +type UpsertRequest struct { + // Namespace The namespace where you upsert vectors. + Namespace *string `json:"namespace,omitempty"` + + // Vectors An array containing the vectors to upsert. Recommended batch limit is 100 vectors. 
+ Vectors []Vector `json:"vectors"` +} + +// UpsertResponse The response for the `upsert` operation. +type UpsertResponse struct { + // UpsertedCount The number of vectors upserted. + UpsertedCount *int64 `json:"upsertedCount,omitempty"` +} + +// Usage defines model for Usage. +type Usage struct { + // ReadUnits The number of read units consumed by this operation. + ReadUnits *int64 `json:"readUnits,omitempty"` +} + +// Vector defines model for Vector. +type Vector struct { + // Id This is the vector's unique id. + Id string `json:"id"` + + // Metadata This is the metadata included in the request. + Metadata *map[string]interface{} `json:"metadata,omitempty"` + + // SparseValues Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. + SparseValues *SparseValues `json:"sparseValues,omitempty"` + + // Values This is the vector data included in the request. + Values []float32 `json:"values"` +} + +// ProtobufAny defines model for protobufAny. +type ProtobufAny struct { + TypeUrl *string `json:"typeUrl,omitempty"` + Value *[]byte `json:"value,omitempty"` +} + +// RpcStatus defines model for rpcStatus. +type RpcStatus struct { + Code *int32 `json:"code,omitempty"` + Details *[]ProtobufAny `json:"details,omitempty"` + Message *string `json:"message,omitempty"` +} + +// ListBulkImportsParams defines parameters for ListBulkImports. +type ListBulkImportsParams struct { + // Limit Max number of operations to return per page. + Limit *int32 `form:"limit,omitempty" json:"limit,omitempty"` + + // PaginationToken Pagination token to continue a previous listing operation. + PaginationToken *string `form:"paginationToken,omitempty" json:"paginationToken,omitempty"` +} + +// FetchVectorsParams defines parameters for FetchVectors. +type FetchVectorsParams struct { + // Ids The vector IDs to fetch. Does not accept values containing spaces. 
+ Ids []string `form:"ids" json:"ids"` + Namespace *string `form:"namespace,omitempty" json:"namespace,omitempty"` +} + +// ListVectorsParams defines parameters for ListVectors. +type ListVectorsParams struct { + // Prefix The vector IDs to fetch. Does not accept values containing spaces. + Prefix *string `form:"prefix,omitempty" json:"prefix,omitempty"` + + // Limit Max number of IDs to return per page. + Limit *int64 `form:"limit,omitempty" json:"limit,omitempty"` + + // PaginationToken Pagination token to continue a previous listing operation. + PaginationToken *string `form:"paginationToken,omitempty" json:"paginationToken,omitempty"` + Namespace *string `form:"namespace,omitempty" json:"namespace,omitempty"` +} + +// StartBulkImportJSONRequestBody defines body for StartBulkImport for application/json ContentType. +type StartBulkImportJSONRequestBody = StartImportRequest + +// DescribeIndexStatsJSONRequestBody defines body for DescribeIndexStats for application/json ContentType. +type DescribeIndexStatsJSONRequestBody = DescribeIndexStatsRequest + +// QueryVectorsJSONRequestBody defines body for QueryVectors for application/json ContentType. +type QueryVectorsJSONRequestBody = QueryRequest + +// DeleteVectorsJSONRequestBody defines body for DeleteVectors for application/json ContentType. +type DeleteVectorsJSONRequestBody = DeleteRequest + +// UpdateVectorJSONRequestBody defines body for UpdateVector for application/json ContentType. +type UpdateVectorJSONRequestBody = UpdateRequest + +// UpsertVectorsJSONRequestBody defines body for UpsertVectors for application/json ContentType. +type UpsertVectorsJSONRequestBody = UpsertRequest + +// RequestEditorFn is the function signature for the RequestEditor callback function +type RequestEditorFn func(ctx context.Context, req *http.Request) error + +// Doer performs HTTP requests. +// +// The standard http.Client implements this interface. 
+type HttpRequestDoer interface { + Do(req *http.Request) (*http.Response, error) +} + +// Client which conforms to the OpenAPI3 specification for this service. +type Client struct { + // The endpoint of the server conforming to this interface, with scheme, + // https://api.deepmap.com for example. This can contain a path relative + // to the server, such as https://api.deepmap.com/dev-test, and all the + // paths in the swagger spec will be appended to the server. + Server string + + // Doer for performing requests, typically a *http.Client with any + // customized settings, such as certificate chains. + Client HttpRequestDoer + + // A list of callbacks for modifying requests which are generated before sending over + // the network. + RequestEditors []RequestEditorFn +} + +// ClientOption allows setting custom parameters during construction +type ClientOption func(*Client) error + +// Creates a new Client, with reasonable defaults +func NewClient(server string, opts ...ClientOption) (*Client, error) { + // create a client with sane default values + client := Client{ + Server: server, + } + // mutate client and add all optional params + for _, o := range opts { + if err := o(&client); err != nil { + return nil, err + } + } + // ensure the server URL always has a trailing slash + if !strings.HasSuffix(client.Server, "/") { + client.Server += "/" + } + // create httpClient, if not already present + if client.Client == nil { + client.Client = &http.Client{} + } + return &client, nil +} + +// WithHTTPClient allows overriding the default Doer, which is +// automatically created using http.Client. This is useful for tests. +func WithHTTPClient(doer HttpRequestDoer) ClientOption { + return func(c *Client) error { + c.Client = doer + return nil + } +} + +// WithRequestEditorFn allows setting up a callback function, which will be +// called right before sending the request. This can be used to mutate the request. 
+func WithRequestEditorFn(fn RequestEditorFn) ClientOption { + return func(c *Client) error { + c.RequestEditors = append(c.RequestEditors, fn) + return nil + } +} + +// The interface specification for the client above. +type ClientInterface interface { + // ListBulkImports request + ListBulkImports(ctx context.Context, params *ListBulkImportsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // StartBulkImportWithBody request with any body + StartBulkImportWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + StartBulkImport(ctx context.Context, body StartBulkImportJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CancelBulkImport request + CancelBulkImport(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DescribeBulkImport request + DescribeBulkImport(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DescribeIndexStatsWithBody request with any body + DescribeIndexStatsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DescribeIndexStats(ctx context.Context, body DescribeIndexStatsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // QueryVectorsWithBody request with any body + QueryVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + QueryVectors(ctx context.Context, body QueryVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteVectorsWithBody request with any body + DeleteVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteVectors(ctx context.Context, body DeleteVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FetchVectors 
request + FetchVectors(ctx context.Context, params *FetchVectorsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ListVectors request + ListVectors(ctx context.Context, params *ListVectorsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateVectorWithBody request with any body + UpdateVectorWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateVector(ctx context.Context, body UpdateVectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpsertVectorsWithBody request with any body + UpsertVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpsertVectors(ctx context.Context, body UpsertVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) +} + +func (c *Client) ListBulkImports(ctx context.Context, params *ListBulkImportsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewListBulkImportsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) StartBulkImportWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewStartBulkImportRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) StartBulkImport(ctx context.Context, body StartBulkImportJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewStartBulkImportRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := 
c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CancelBulkImport(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCancelBulkImportRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DescribeBulkImport(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDescribeBulkImportRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DescribeIndexStatsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDescribeIndexStatsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DescribeIndexStats(ctx context.Context, body DescribeIndexStatsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDescribeIndexStatsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) QueryVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewQueryVectorsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, 
reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) QueryVectors(ctx context.Context, body QueryVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewQueryVectorsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteVectorsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteVectors(ctx context.Context, body DeleteVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteVectorsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FetchVectors(ctx context.Context, params *FetchVectorsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFetchVectorsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ListVectors(ctx context.Context, params *ListVectorsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewListVectorsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return 
c.Client.Do(req) +} + +func (c *Client) UpdateVectorWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateVectorRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateVector(ctx context.Context, body UpdateVectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateVectorRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpsertVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpsertVectorsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpsertVectors(ctx context.Context, body UpsertVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpsertVectorsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +// NewListBulkImportsRequest generates requests for ListBulkImports +func NewListBulkImportsRequest(server string, params *ListBulkImportsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/bulk/imports") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PaginationToken != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "paginationToken", runtime.ParamLocationQuery, *params.PaginationToken); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewStartBulkImportRequest calls the generic StartBulkImport builder with application/json body +func NewStartBulkImportRequest(server string, body StartBulkImportJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewStartBulkImportRequestWithBody(server, "application/json", bodyReader) +} + +// NewStartBulkImportRequestWithBody generates requests for StartBulkImport with any type of body +func NewStartBulkImportRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/bulk/imports") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCancelBulkImportRequest generates requests for CancelBulkImport +func NewCancelBulkImportRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/bulk/imports/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDescribeBulkImportRequest generates requests for DescribeBulkImport +func NewDescribeBulkImportRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/bulk/imports/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDescribeIndexStatsRequest calls the generic DescribeIndexStats builder with application/json body +func NewDescribeIndexStatsRequest(server string, body DescribeIndexStatsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDescribeIndexStatsRequestWithBody(server, "application/json", bodyReader) +} + +// NewDescribeIndexStatsRequestWithBody generates requests for DescribeIndexStats with any type of body +func NewDescribeIndexStatsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/describe_index_stats") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewQueryVectorsRequest calls the generic QueryVectors builder with application/json body +func NewQueryVectorsRequest(server string, body QueryVectorsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewQueryVectorsRequestWithBody(server, "application/json", bodyReader) +} + +// NewQueryVectorsRequestWithBody generates requests for QueryVectors with any type of body +func NewQueryVectorsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/query") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteVectorsRequest calls the generic DeleteVectors builder with application/json body +func NewDeleteVectorsRequest(server string, body DeleteVectorsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteVectorsRequestWithBody(server, "application/json", bodyReader) +} + +// NewDeleteVectorsRequestWithBody generates requests for DeleteVectors with any type of body +func NewDeleteVectorsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/vectors/delete") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFetchVectorsRequest generates requests for FetchVectors +func NewFetchVectorsRequest(server string, params *FetchVectorsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/vectors/fetch") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ids", runtime.ParamLocationQuery, params.Ids); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.Namespace != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace", runtime.ParamLocationQuery, *params.Namespace); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewListVectorsRequest generates requests for ListVectors +func NewListVectorsRequest(server string, params *ListVectorsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/vectors/list") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Prefix != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prefix", runtime.ParamLocationQuery, *params.Prefix); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PaginationToken != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "paginationToken", runtime.ParamLocationQuery, *params.PaginationToken); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Namespace != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace", runtime.ParamLocationQuery, *params.Namespace); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateVectorRequest calls the generic UpdateVector builder with application/json body +func 
NewUpdateVectorRequest(server string, body UpdateVectorJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateVectorRequestWithBody(server, "application/json", bodyReader) +} + +// NewUpdateVectorRequestWithBody generates requests for UpdateVector with any type of body +func NewUpdateVectorRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/vectors/update") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewUpsertVectorsRequest calls the generic UpsertVectors builder with application/json body +func NewUpsertVectorsRequest(server string, body UpsertVectorsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpsertVectorsRequestWithBody(server, "application/json", bodyReader) +} + +// NewUpsertVectorsRequestWithBody generates requests for UpsertVectors with any type of body +func NewUpsertVectorsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/vectors/upsert") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +func (c *Client) applyEditors(ctx context.Context, req *http.Request, additionalEditors []RequestEditorFn) error { + for _, r := range c.RequestEditors { + if err := r(ctx, req); err != nil { + return err + } + } + for _, r := range additionalEditors { + if err := r(ctx, req); err != nil { + return err + } + } + return nil +} + +// ClientWithResponses builds on ClientInterface to offer response payloads +type ClientWithResponses struct { + ClientInterface +} + +// NewClientWithResponses creates a new ClientWithResponses, which wraps +// Client with return type handling +func NewClientWithResponses(server string, opts ...ClientOption) (*ClientWithResponses, error) { + client, err := NewClient(server, opts...) + if err != nil { + return nil, err + } + return &ClientWithResponses{client}, nil +} + +// WithBaseURL overrides the baseURL. +func WithBaseURL(baseURL string) ClientOption { + return func(c *Client) error { + newBaseURL, err := url.Parse(baseURL) + if err != nil { + return err + } + c.Server = newBaseURL.String() + return nil + } +} + +// ClientWithResponsesInterface is the interface specification for the client with responses above. 
+type ClientWithResponsesInterface interface { + // ListBulkImportsWithResponse request + ListBulkImportsWithResponse(ctx context.Context, params *ListBulkImportsParams, reqEditors ...RequestEditorFn) (*ListBulkImportsResponse, error) + + // StartBulkImportWithBodyWithResponse request with any body + StartBulkImportWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*StartBulkImportResponse, error) + + StartBulkImportWithResponse(ctx context.Context, body StartBulkImportJSONRequestBody, reqEditors ...RequestEditorFn) (*StartBulkImportResponse, error) + + // CancelBulkImportWithResponse request + CancelBulkImportWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*CancelBulkImportResponse, error) + + // DescribeBulkImportWithResponse request + DescribeBulkImportWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DescribeBulkImportResponse, error) + + // DescribeIndexStatsWithBodyWithResponse request with any body + DescribeIndexStatsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DescribeIndexStatsResponse, error) + + DescribeIndexStatsWithResponse(ctx context.Context, body DescribeIndexStatsJSONRequestBody, reqEditors ...RequestEditorFn) (*DescribeIndexStatsResponse, error) + + // QueryVectorsWithBodyWithResponse request with any body + QueryVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*QueryVectorsResponse, error) + + QueryVectorsWithResponse(ctx context.Context, body QueryVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*QueryVectorsResponse, error) + + // DeleteVectorsWithBodyWithResponse request with any body + DeleteVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteVectorsResponse, error) + + DeleteVectorsWithResponse(ctx context.Context, 
body DeleteVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteVectorsResponse, error) + + // FetchVectorsWithResponse request + FetchVectorsWithResponse(ctx context.Context, params *FetchVectorsParams, reqEditors ...RequestEditorFn) (*FetchVectorsResponse, error) + + // ListVectorsWithResponse request + ListVectorsWithResponse(ctx context.Context, params *ListVectorsParams, reqEditors ...RequestEditorFn) (*ListVectorsResponse, error) + + // UpdateVectorWithBodyWithResponse request with any body + UpdateVectorWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateVectorResponse, error) + + UpdateVectorWithResponse(ctx context.Context, body UpdateVectorJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateVectorResponse, error) + + // UpsertVectorsWithBodyWithResponse request with any body + UpsertVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpsertVectorsResponse, error) + + UpsertVectorsWithResponse(ctx context.Context, body UpsertVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*UpsertVectorsResponse, error) +} + +type ListBulkImportsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ListImportsResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r ListBulkImportsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ListBulkImportsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type StartBulkImportResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *StartImportResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r StartBulkImportResponse) 
Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r StartBulkImportResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CancelBulkImportResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CancelImportResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r CancelBulkImportResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CancelBulkImportResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DescribeBulkImportResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ImportModel + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r DescribeBulkImportResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DescribeBulkImportResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DescribeIndexStatsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *IndexDescription + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r DescribeIndexStatsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DescribeIndexStatsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type QueryVectorsResponse 
struct { + Body []byte + HTTPResponse *http.Response + JSON200 *QueryResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r QueryVectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r QueryVectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteVectorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *DeleteResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r DeleteVectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteVectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FetchVectorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *FetchResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r FetchVectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FetchVectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ListVectorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ListResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r ListVectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode 
+func (r ListVectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateVectorResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *UpdateResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r UpdateVectorResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateVectorResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpsertVectorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *UpsertResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r UpsertVectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpsertVectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +// ListBulkImportsWithResponse request returning *ListBulkImportsResponse +func (c *ClientWithResponses) ListBulkImportsWithResponse(ctx context.Context, params *ListBulkImportsParams, reqEditors ...RequestEditorFn) (*ListBulkImportsResponse, error) { + rsp, err := c.ListBulkImports(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseListBulkImportsResponse(rsp) +} + +// StartBulkImportWithBodyWithResponse request with arbitrary body returning *StartBulkImportResponse +func (c *ClientWithResponses) StartBulkImportWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*StartBulkImportResponse, error) { + rsp, err := c.StartBulkImportWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseStartBulkImportResponse(rsp) +} + +func (c *ClientWithResponses) StartBulkImportWithResponse(ctx context.Context, body StartBulkImportJSONRequestBody, reqEditors ...RequestEditorFn) (*StartBulkImportResponse, error) { + rsp, err := c.StartBulkImport(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseStartBulkImportResponse(rsp) +} + +// CancelBulkImportWithResponse request returning *CancelBulkImportResponse +func (c *ClientWithResponses) CancelBulkImportWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*CancelBulkImportResponse, error) { + rsp, err := c.CancelBulkImport(ctx, id, reqEditors...) + if err != nil { + return nil, err + } + return ParseCancelBulkImportResponse(rsp) +} + +// DescribeBulkImportWithResponse request returning *DescribeBulkImportResponse +func (c *ClientWithResponses) DescribeBulkImportWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DescribeBulkImportResponse, error) { + rsp, err := c.DescribeBulkImport(ctx, id, reqEditors...) + if err != nil { + return nil, err + } + return ParseDescribeBulkImportResponse(rsp) +} + +// DescribeIndexStatsWithBodyWithResponse request with arbitrary body returning *DescribeIndexStatsResponse +func (c *ClientWithResponses) DescribeIndexStatsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DescribeIndexStatsResponse, error) { + rsp, err := c.DescribeIndexStatsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseDescribeIndexStatsResponse(rsp) +} + +func (c *ClientWithResponses) DescribeIndexStatsWithResponse(ctx context.Context, body DescribeIndexStatsJSONRequestBody, reqEditors ...RequestEditorFn) (*DescribeIndexStatsResponse, error) { + rsp, err := c.DescribeIndexStats(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseDescribeIndexStatsResponse(rsp) +} + +// QueryVectorsWithBodyWithResponse request with arbitrary body returning *QueryVectorsResponse +func (c *ClientWithResponses) QueryVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*QueryVectorsResponse, error) { + rsp, err := c.QueryVectorsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseQueryVectorsResponse(rsp) +} + +func (c *ClientWithResponses) QueryVectorsWithResponse(ctx context.Context, body QueryVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*QueryVectorsResponse, error) { + rsp, err := c.QueryVectors(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseQueryVectorsResponse(rsp) +} + +// DeleteVectorsWithBodyWithResponse request with arbitrary body returning *DeleteVectorsResponse +func (c *ClientWithResponses) DeleteVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteVectorsResponse, error) { + rsp, err := c.DeleteVectorsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteVectorsResponse(rsp) +} + +func (c *ClientWithResponses) DeleteVectorsWithResponse(ctx context.Context, body DeleteVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteVectorsResponse, error) { + rsp, err := c.DeleteVectors(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteVectorsResponse(rsp) +} + +// FetchVectorsWithResponse request returning *FetchVectorsResponse +func (c *ClientWithResponses) FetchVectorsWithResponse(ctx context.Context, params *FetchVectorsParams, reqEditors ...RequestEditorFn) (*FetchVectorsResponse, error) { + rsp, err := c.FetchVectors(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseFetchVectorsResponse(rsp) +} + +// ListVectorsWithResponse request returning *ListVectorsResponse +func (c *ClientWithResponses) ListVectorsWithResponse(ctx context.Context, params *ListVectorsParams, reqEditors ...RequestEditorFn) (*ListVectorsResponse, error) { + rsp, err := c.ListVectors(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseListVectorsResponse(rsp) +} + +// UpdateVectorWithBodyWithResponse request with arbitrary body returning *UpdateVectorResponse +func (c *ClientWithResponses) UpdateVectorWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateVectorResponse, error) { + rsp, err := c.UpdateVectorWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateVectorResponse(rsp) +} + +func (c *ClientWithResponses) UpdateVectorWithResponse(ctx context.Context, body UpdateVectorJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateVectorResponse, error) { + rsp, err := c.UpdateVector(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateVectorResponse(rsp) +} + +// UpsertVectorsWithBodyWithResponse request with arbitrary body returning *UpsertVectorsResponse +func (c *ClientWithResponses) UpsertVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpsertVectorsResponse, error) { + rsp, err := c.UpsertVectorsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpsertVectorsResponse(rsp) +} + +func (c *ClientWithResponses) UpsertVectorsWithResponse(ctx context.Context, body UpsertVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*UpsertVectorsResponse, error) { + rsp, err := c.UpsertVectors(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseUpsertVectorsResponse(rsp) +} + +// ParseListBulkImportsResponse parses an HTTP response from a ListBulkImportsWithResponse call +func ParseListBulkImportsResponse(rsp *http.Response) (*ListBulkImportsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &ListBulkImportsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ListImportsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseStartBulkImportResponse parses an HTTP response from a StartBulkImportWithResponse call +func ParseStartBulkImportResponse(rsp *http.Response) (*StartBulkImportResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &StartBulkImportResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest StartImportResponse + if err := json.Unmarshal(bodyBytes, 
&dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseCancelBulkImportResponse parses an HTTP response from a CancelBulkImportWithResponse call +func ParseCancelBulkImportResponse(rsp *http.Response) (*CancelBulkImportResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &CancelBulkImportResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CancelImportResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseDescribeBulkImportResponse parses an HTTP response from a DescribeBulkImportWithResponse call +func ParseDescribeBulkImportResponse(rsp *http.Response) (*DescribeBulkImportResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DescribeBulkImportResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ImportModel + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseDescribeIndexStatsResponse parses an HTTP response from a DescribeIndexStatsWithResponse call +func ParseDescribeIndexStatsResponse(rsp *http.Response) (*DescribeIndexStatsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DescribeIndexStatsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + 
var dest IndexDescription + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseQueryVectorsResponse parses an HTTP response from a QueryVectorsWithResponse call +func ParseQueryVectorsResponse(rsp *http.Response) (*QueryVectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &QueryVectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest QueryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var 
dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseDeleteVectorsResponse parses an HTTP response from a DeleteVectorsWithResponse call +func ParseDeleteVectorsResponse(rsp *http.Response) (*DeleteVectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DeleteVectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest DeleteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseFetchVectorsResponse parses an HTTP response from a FetchVectorsWithResponse call +func ParseFetchVectorsResponse(rsp *http.Response) (*FetchVectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &FetchVectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest FetchResponse + 
if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseListVectorsResponse parses an HTTP response from a ListVectorsWithResponse call +func ParseListVectorsResponse(rsp *http.Response) (*ListVectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &ListVectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ListResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseUpdateVectorResponse parses an HTTP response from a UpdateVectorWithResponse call +func ParseUpdateVectorResponse(rsp *http.Response) (*UpdateVectorResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &UpdateVectorResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest UpdateResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseUpsertVectorsResponse parses an HTTP response from a UpsertVectorsWithResponse call +func ParseUpsertVectorsResponse(rsp *http.Response) (*UpsertVectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &UpsertVectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest UpsertResponse + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} diff --git a/internal/gen/inference/inference_2024-10.oas.go b/internal/gen/inference/inference_2024-10.oas.go new file mode 100644 index 0000000..13b2a59 --- /dev/null +++ b/internal/gen/inference/inference_2024-10.oas.go @@ -0,0 +1,603 @@ +// Package inference provides primitives to interact with the openapi HTTP API. +// +// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.3.0 DO NOT EDIT. +package inference + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" +) + +const ( + ApiKeyAuthScopes = "ApiKeyAuth.Scopes" +) + +// Defines values for ErrorResponseErrorCode. 
+const ( + ABORTED ErrorResponseErrorCode = "ABORTED" + ALREADYEXISTS ErrorResponseErrorCode = "ALREADY_EXISTS" + DATALOSS ErrorResponseErrorCode = "DATA_LOSS" + DEADLINEEXCEEDED ErrorResponseErrorCode = "DEADLINE_EXCEEDED" + FAILEDPRECONDITION ErrorResponseErrorCode = "FAILED_PRECONDITION" + FORBIDDEN ErrorResponseErrorCode = "FORBIDDEN" + INTERNAL ErrorResponseErrorCode = "INTERNAL" + INVALIDARGUMENT ErrorResponseErrorCode = "INVALID_ARGUMENT" + NOTFOUND ErrorResponseErrorCode = "NOT_FOUND" + OK ErrorResponseErrorCode = "OK" + OUTOFRANGE ErrorResponseErrorCode = "OUT_OF_RANGE" + PERMISSIONDENIED ErrorResponseErrorCode = "PERMISSION_DENIED" + QUOTAEXCEEDED ErrorResponseErrorCode = "QUOTA_EXCEEDED" + RESOURCEEXHAUSTED ErrorResponseErrorCode = "RESOURCE_EXHAUSTED" + UNAUTHENTICATED ErrorResponseErrorCode = "UNAUTHENTICATED" + UNAVAILABLE ErrorResponseErrorCode = "UNAVAILABLE" + UNIMPLEMENTED ErrorResponseErrorCode = "UNIMPLEMENTED" + UNKNOWN ErrorResponseErrorCode = "UNKNOWN" +) + +// Document Document for reranking +type Document map[string]string + +// EmbedRequest defines model for EmbedRequest. +type EmbedRequest struct { + // Inputs List of inputs to generate embeddings for. + Inputs []struct { + Text *string `json:"text,omitempty"` + } `json:"inputs"` + + // Model The [model](https://docs.pinecone.io/guides/inference/understanding-inference#models) to use for embedding generation. + Model string `json:"model"` + + // Parameters Model-specific parameters. + Parameters *struct { + // InputType Common property used to distinguish between types of data. + InputType *string `json:"input_type,omitempty"` + + // Truncate How to handle inputs longer than those supported by the model. If `"END"`, truncate the input sequence at the token limit. If `"NONE"`, return an error when the input exceeds the token limit. 
+ Truncate *string `json:"truncate,omitempty"` + } `json:"parameters,omitempty"` +} + +// Embedding Embedding of a single input +type Embedding struct { + // Values The embedding values. + Values *[]float32 `json:"values,omitempty"` +} + +// EmbeddingsList Embeddings generated for the input +type EmbeddingsList struct { + // Data The embeddings generated for the inputs. + Data []Embedding `json:"data"` + + // Model The model used to generate the embeddings + Model string `json:"model"` + + // Usage Usage statistics for the model inference. + Usage struct { + // TotalTokens Total number of tokens consumed across all inputs. + TotalTokens *int `json:"total_tokens,omitempty"` + } `json:"usage"` +} + +// ErrorResponse The response shape used for all error responses. +type ErrorResponse struct { + // Error Detailed information about the error that occurred. + Error struct { + Code ErrorResponseErrorCode `json:"code"` + + // Details Additional information about the error. This field is not guaranteed to be present. + Details *map[string]interface{} `json:"details,omitempty"` + Message string `json:"message"` + } `json:"error"` + + // Status The HTTP status code of the error. + Status int `json:"status"` +} + +// ErrorResponseErrorCode defines model for ErrorResponse.Error.Code. +type ErrorResponseErrorCode string + +// RankedDocument A ranked document with a relevance score and an index position. +type RankedDocument struct { + // Document Document for reranking + Document *Document `json:"document,omitempty"` + + // Index The index of the document + Index int `json:"index"` + + // Score The relevance score of the document normalized between 0 and 1. + Score float32 `json:"score"` +} + +// RerankResult The result of a reranking request. +type RerankResult struct { + // Data The reranked documents. + Data []RankedDocument `json:"data"` + + // Model The model used to rerank documents. + Model string `json:"model"` + + // Usage Usage statistics for the model inference. 
+ Usage struct { + RerankUnits *int `json:"rerank_units,omitempty"` + } `json:"usage"` +} + +// RerankJSONBody defines parameters for Rerank. +type RerankJSONBody struct { + // Documents The documents to rerank. + Documents []Document `json:"documents"` + + // Model The [model](https://docs.pinecone.io/guides/inference/understanding-inference#models) to use for reranking. + Model string `json:"model"` + + // Parameters Additional model-specific parameters for the reranker. + Parameters *map[string]string `json:"parameters,omitempty"` + + // Query The query to rerank documents against. + Query string `json:"query"` + + // RankFields The fields to rank the documents by. If not provided, the default is `"text"`. + RankFields *[]string `json:"rank_fields,omitempty"` + + // ReturnDocuments Whether to return the documents in the response. + ReturnDocuments *bool `json:"return_documents,omitempty"` + + // TopN The number of results to return sorted by relevance. Defaults to the number of inputs. + TopN *int `json:"top_n,omitempty"` +} + +// EmbedJSONRequestBody defines body for Embed for application/json ContentType. +type EmbedJSONRequestBody = EmbedRequest + +// RerankJSONRequestBody defines body for Rerank for application/json ContentType. +type RerankJSONRequestBody RerankJSONBody + +// RequestEditorFn is the function signature for the RequestEditor callback function +type RequestEditorFn func(ctx context.Context, req *http.Request) error + +// Doer performs HTTP requests. +// +// The standard http.Client implements this interface. +type HttpRequestDoer interface { + Do(req *http.Request) (*http.Response, error) +} + +// Client which conforms to the OpenAPI3 specification for this service. +type Client struct { + // The endpoint of the server conforming to this interface, with scheme, + // https://api.deepmap.com for example. 
This can contain a path relative + // to the server, such as https://api.deepmap.com/dev-test, and all the + // paths in the swagger spec will be appended to the server. + Server string + + // Doer for performing requests, typically a *http.Client with any + // customized settings, such as certificate chains. + Client HttpRequestDoer + + // A list of callbacks for modifying requests which are generated before sending over + // the network. + RequestEditors []RequestEditorFn +} + +// ClientOption allows setting custom parameters during construction +type ClientOption func(*Client) error + +// Creates a new Client, with reasonable defaults +func NewClient(server string, opts ...ClientOption) (*Client, error) { + // create a client with sane default values + client := Client{ + Server: server, + } + // mutate client and add all optional params + for _, o := range opts { + if err := o(&client); err != nil { + return nil, err + } + } + // ensure the server URL always has a trailing slash + if !strings.HasSuffix(client.Server, "/") { + client.Server += "/" + } + // create httpClient, if not already present + if client.Client == nil { + client.Client = &http.Client{} + } + return &client, nil +} + +// WithHTTPClient allows overriding the default Doer, which is +// automatically created using http.Client. This is useful for tests. +func WithHTTPClient(doer HttpRequestDoer) ClientOption { + return func(c *Client) error { + c.Client = doer + return nil + } +} + +// WithRequestEditorFn allows setting up a callback function, which will be +// called right before sending the request. This can be used to mutate the request. +func WithRequestEditorFn(fn RequestEditorFn) ClientOption { + return func(c *Client) error { + c.RequestEditors = append(c.RequestEditors, fn) + return nil + } +} + +// The interface specification for the client above. 
+type ClientInterface interface { + // EmbedWithBody request with any body + EmbedWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + Embed(ctx context.Context, body EmbedJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // RerankWithBody request with any body + RerankWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + Rerank(ctx context.Context, body RerankJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) +} + +func (c *Client) EmbedWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEmbedRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) Embed(ctx context.Context, body EmbedJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEmbedRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) RerankWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewRerankRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) Rerank(ctx context.Context, body RerankJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewRerankRequest(c.Server, body) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +// NewEmbedRequest calls the generic Embed builder with application/json body +func NewEmbedRequest(server string, body EmbedJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewEmbedRequestWithBody(server, "application/json", bodyReader) +} + +// NewEmbedRequestWithBody generates requests for Embed with any type of body +func NewEmbedRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/embed") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewRerankRequest calls the generic Rerank builder with application/json body +func NewRerankRequest(server string, body RerankJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewRerankRequestWithBody(server, "application/json", bodyReader) +} + +// NewRerankRequestWithBody generates requests for Rerank with any type of body +func NewRerankRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/rerank") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +func (c *Client) applyEditors(ctx context.Context, req *http.Request, additionalEditors []RequestEditorFn) error { + for _, r := range c.RequestEditors { + if err := r(ctx, req); err != nil { + return err + } + } + for _, r := range additionalEditors { + if err := r(ctx, req); err != nil { + return err + } + } + return nil +} + +// ClientWithResponses builds on ClientInterface to offer response payloads +type ClientWithResponses struct { + ClientInterface +} + +// NewClientWithResponses creates a new ClientWithResponses, which wraps +// Client with return type handling +func NewClientWithResponses(server string, opts ...ClientOption) (*ClientWithResponses, error) { + client, err := NewClient(server, opts...) + if err != nil { + return nil, err + } + return &ClientWithResponses{client}, nil +} + +// WithBaseURL overrides the baseURL. +func WithBaseURL(baseURL string) ClientOption { + return func(c *Client) error { + newBaseURL, err := url.Parse(baseURL) + if err != nil { + return err + } + c.Server = newBaseURL.String() + return nil + } +} + +// ClientWithResponsesInterface is the interface specification for the client with responses above. 
+type ClientWithResponsesInterface interface { + // EmbedWithBodyWithResponse request with any body + EmbedWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EmbedResponse, error) + + EmbedWithResponse(ctx context.Context, body EmbedJSONRequestBody, reqEditors ...RequestEditorFn) (*EmbedResponse, error) + + // RerankWithBodyWithResponse request with any body + RerankWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RerankResponse, error) + + RerankWithResponse(ctx context.Context, body RerankJSONRequestBody, reqEditors ...RequestEditorFn) (*RerankResponse, error) +} + +type EmbedResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *EmbeddingsList + JSON400 *ErrorResponse + JSON401 *ErrorResponse + JSON500 *ErrorResponse +} + +// Status returns HTTPResponse.Status +func (r EmbedResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EmbedResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type RerankResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *RerankResult + JSON400 *ErrorResponse + JSON401 *ErrorResponse + JSON500 *ErrorResponse +} + +// Status returns HTTPResponse.Status +func (r RerankResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r RerankResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +// EmbedWithBodyWithResponse request with arbitrary body returning *EmbedResponse +func (c *ClientWithResponses) EmbedWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) 
(*EmbedResponse, error) { + rsp, err := c.EmbedWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseEmbedResponse(rsp) +} + +func (c *ClientWithResponses) EmbedWithResponse(ctx context.Context, body EmbedJSONRequestBody, reqEditors ...RequestEditorFn) (*EmbedResponse, error) { + rsp, err := c.Embed(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseEmbedResponse(rsp) +} + +// RerankWithBodyWithResponse request with arbitrary body returning *RerankResponse +func (c *ClientWithResponses) RerankWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RerankResponse, error) { + rsp, err := c.RerankWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseRerankResponse(rsp) +} + +func (c *ClientWithResponses) RerankWithResponse(ctx context.Context, body RerankJSONRequestBody, reqEditors ...RequestEditorFn) (*RerankResponse, error) { + rsp, err := c.Rerank(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseRerankResponse(rsp) +} + +// ParseEmbedResponse parses an HTTP response from a EmbedWithResponse call +func ParseEmbedResponse(rsp *http.Response) (*EmbedResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &EmbedResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest EmbeddingsList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil +} + +// ParseRerankResponse parses an HTTP response from a RerankWithResponse call +func ParseRerankResponse(rsp *http.Response) (*RerankResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &RerankResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest RerankResult + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil +} diff --git a/internal/provider/header.go b/internal/provider/header.go index 7c880e7..0a63d25 100644 --- a/internal/provider/header.go +++ b/internal/provider/header.go @@ -5,16 +5,16 @@ import ( "net/http" ) -type customHeader struct { +type CustomHeader struct { name string value string } -func NewHeaderProvider(name string, value string) *customHeader { - return &customHeader{name: name, value: value} +func NewHeaderProvider(name string, value string) *CustomHeader { + return &CustomHeader{name: name, value: value} } -func (h *customHeader) Intercept(ctx context.Context, req *http.Request) error { +func (h *CustomHeader) Intercept(ctx context.Context, req *http.Request) error { req.Header.Set(h.name, h.value) return nil } diff --git a/justfile b/justfile index 0bdc7e4..db8a291 100644 --- a/justfile +++ b/justfile @@ -1,4 +1,4 @@ -api_version := "2024-07" +api_version := "2024-10" test: #!/usr/bin/env bash diff --git a/pinecone/client.go b/pinecone/client.go index 261abba..661c1d8 100644 --- a/pinecone/client.go +++ b/pinecone/client.go @@ -15,29 +15,28 @@ import ( "strings" "github.com/pinecone-io/go-pinecone/internal/gen" - "github.com/pinecone-io/go-pinecone/internal/gen/control" + "github.com/pinecone-io/go-pinecone/internal/gen/db_control" + db_data_rest 
"github.com/pinecone-io/go-pinecone/internal/gen/db_data/rest" + "github.com/pinecone-io/go-pinecone/internal/gen/inference" "github.com/pinecone-io/go-pinecone/internal/provider" "github.com/pinecone-io/go-pinecone/internal/useragent" "google.golang.org/grpc" ) -// Client holds the parameters for connecting to the Pinecone service. It is returned by the NewClient and NewClientBase -// functions. To use Client, first build the parameters of the request using NewClientParams (or NewClientBaseParams). -// Then, pass those parameters into the NewClient (or NewClientBase) function to create a new Client object. -// Once instantiated, you can use Client to execute Pinecone API requests (e.g. create an Index, list Indexes, -// etc.), and Inference API requests. Read more about different Pinecone API routes at [docs.pinecone.io/reference/api]. +// [Client] holds the parameters for connecting to the Pinecone service. It is returned by the [NewClient] and [NewClientBase] +// functions. To use Client, first build the parameters of the request using [NewClientParams] (or [NewClientBaseParams]). +// Then, pass those parameters into the [NewClient] (or [NewClientBase]) function to create a new [Client] object. +// Once instantiated, you can use [Client] to execute Pinecone API requests (e.g. create an [Index], list Indexes, +// etc.), and Inference API requests. Read more about different Pinecone API routes [here]. // // Note: Client methods are safe for concurrent use. // // Fields: -// - Inference: An InferenceService object that exposes methods for interacting with the Pinecone [Inference API]. -// - headers: An optional map of HTTP headers to include in each API request, provided through -// NewClientParams.Headers or NewClientBaseParams.Headers. +// - Inference: An [InferenceService] object that exposes methods for interacting with the Pinecone [Inference API]. 
// - restClient: Optional underlying *http.Client object used to communicate with the Pinecone API, -// provided through NewClientParams.RestClient or NewClientBaseParams.RestClient. If not provided, +// provided through [NewClientParams.RestClient] or [NewClientBaseParams.RestClient]. If not provided, // a default client is created for you. -// - sourceTag: An optional string used to help Pinecone attribute API activity, provided through NewClientParams.SourceTag -// or NewClientBaseParams.SourceTag. +// - baseParams: A [NewClientBaseParams] object that holds the configuration for the Pinecone client. // // Example: // @@ -67,16 +66,15 @@ import ( // log.Println("IndexConnection created successfully!") // } // -// [docs.pinecone.io/reference/api]: https://docs.pinecone.io/reference/api/control-plane/list_indexes +// [here]: https://docs.pinecone.io/reference/api/control-plane/list_indexes // [Inference API]: https://docs.pinecone.io/reference/api/2024-07/inference/generate-embeddings type Client struct { Inference *InferenceService - headers map[string]string - restClient *control.Client - sourceTag string + restClient *db_control.Client + baseParams *NewClientBaseParams } -// NewClientParams holds the parameters for creating a new Client instance while authenticating via an API key. +// [NewClientParams] holds the parameters for creating a new [Client] instance while authenticating via an API key. // // Fields: // - ApiKey: (Required) The API key used to authenticate with the Pinecone API. @@ -86,7 +84,7 @@ type Client struct { // - RestClient: An optional HTTP client to use for communication with the Pinecone API. // - SourceTag: An optional string used to help Pinecone attribute API activity. // -// See Client for code example. +// See [Client] for code example. 
type NewClientParams struct { ApiKey string // required - provide through NewClientParams or environment variable PINECONE_API_KEY Headers map[string]string // optional @@ -95,7 +93,7 @@ type NewClientParams struct { SourceTag string // optional } -// NewClientBaseParams holds the parameters for creating a new Client instance while passing custom authentication +// [NewClientBaseParams] holds the parameters for creating a new [Client] instance while passing custom authentication // headers. If there is no API key or authentication provided through Headers, API calls will fail. // // Fields: @@ -106,7 +104,7 @@ type NewClientParams struct { // - RestClient: (Optional) An *http.Client object to use for communication with the Pinecone API. // - SourceTag: (Optional) A string used to help Pinecone attribute API activity. // -// See Client for code example. +// See [Client] for code example. type NewClientBaseParams struct { Headers map[string]string Host string @@ -114,31 +112,31 @@ type NewClientBaseParams struct { SourceTag string } -// NewIndexConnParams holds the parameters for creating an IndexConnection to a Pinecone index. +// [NewIndexConnParams] holds the parameters for creating an [IndexConnection] to a Pinecone index. // // Fields: -// - Host: (Required) The host URL of the Pinecone index. To find your host url use the DescribeIndex or ListIndexes methods. +// - Host: (Required) The host URL of the Pinecone index. To find your host url use the [Client.DescribeIndex] or [Client.ListIndexes] methods. // Alternatively, the host is displayed in the Pinecone web console. // - Namespace: (Optional) The index namespace to use for operations. If not provided, the default namespace of "" will be used. // - AdditionalMetadata: (Optional) Metadata to be sent with each RPC request. // -// See Client.Index for code example. +// See [Client.Index] for code example. 
type NewIndexConnParams struct { Host string // required - obtained through DescribeIndex or ListIndexes Namespace string // optional - if not provided the default namespace of "" will be used AdditionalMetadata map[string]string // optional } -// NewClient creates and initializes a new instance of Client. +// [NewClient] creates and initializes a new instance of [Client]. // This function sets up the Pinecone client with the necessary configuration for authentication and communication. // // Parameters: -// - in: A NewClientParams object. See NewClientParams for more information. +// - in: A [NewClientParams] object. See [NewClientParams] for more information. // // Note: It is important to handle the error returned by this function to ensure that the // Pinecone client has been created successfully before attempting to make API calls. // -// Returns a pointer to an initialized Client instance or an error. +// Returns a pointer to an initialized [Client] instance or an error. // // Example: // @@ -177,18 +175,18 @@ func NewClient(in NewClientParams) (*Client, error) { return NewClientBase(NewClientBaseParams{Headers: clientHeaders, Host: in.Host, RestClient: in.RestClient, SourceTag: in.SourceTag}) } -// NewClientBase creates and initializes a new instance of Client with custom authentication headers. +// [NewClientBase] creates and initializes a new instance of [Client] with custom authentication headers. // // Parameters: -// - in: A NewClientBaseParams object that includes the necessary configuration for the Pinecone client. See -// NewClientBaseParams for more information. +// - in: A [NewClientBaseParams] object that includes the necessary configuration for the Pinecone client. See +// [NewClientBaseParams] for more information. // // Notes: // - It is important to handle the error returned by this function to ensure that the // Pinecone client has been created successfully before attempting to make API calls. 
-// - A Pinecone API key is not required when using NewClientBase. +// - A Pinecone API key is not required when using [NewClientBase]. // -// Returns a pointer to an initialized Client instance or an error. +// Returns a pointer to an initialized [Client] instance or an error. // // Example: // @@ -209,7 +207,8 @@ func NewClient(in NewClientParams) (*Client, error) { // fmt.Println("Successfully created a new Client object!") // } func NewClientBase(in NewClientBaseParams) (*Client, error) { - clientOptions := buildClientBaseOptions(in) + controlOptions := buildClientBaseOptions(in) + inferenceOptions := buildInferenceBaseOptions(in) var err error controlHostOverride := valueOrFallback(in.Host, os.Getenv("PINECONE_CONTROLLER_HOST")) @@ -220,25 +219,33 @@ func NewClientBase(in NewClientBaseParams) (*Client, error) { } } - client, err := control.NewClient(valueOrFallback(controlHostOverride, "https://api.pinecone.io"), clientOptions...) + dbControlClient, err := db_control.NewClient(valueOrFallback(controlHostOverride, "https://api.pinecone.io"), controlOptions...) + if err != nil { + return nil, err + } + inferenceClient, err := inference.NewClient(valueOrFallback(controlHostOverride, "https://api.pinecone.io"), inferenceOptions...) if err != nil { return nil, err } - c := Client{Inference: &InferenceService{client: client}, restClient: client, sourceTag: in.SourceTag, headers: in.Headers} + c := Client{ + Inference: &InferenceService{client: inferenceClient}, + restClient: dbControlClient, + baseParams: &in, + } return &c, nil } -// Index creates an IndexConnection to a specified host. +// [Client.Index] creates an [IndexConnection] to a specified host. // // Parameters: -// - in: A NewIndexConnParams object that includes the necessary configuration to create an IndexConnection. +// - in: A [NewIndexConnParams] object that includes the necessary configuration to create an [IndexConnection]. // See NewIndexConnParams for more information. 
// -// Note: It is important to handle the error returned by this method to ensure that the IndexConnection is created +// Note: It is important to handle the error returned by this method to ensure that the [IndexConnection] is created // successfully before making data plane calls. // -// Returns a pointer to an IndexConnection instance or an error. +// Returns a pointer to an [IndexConnection] instance or an error. // // Example: // @@ -298,11 +305,18 @@ func (c *Client) Index(in NewIndexConnParams, dialOpts ...grpc.DialOption) (*Ind in.AdditionalMetadata[key] = value } + dbDataOptions := buildDataClientBaseOptions(*c.baseParams) + dbDataClient, err := db_data_rest.NewClient(ensureHostHasHttps(in.Host), dbDataOptions...) + if err != nil { + return nil, err + } + idx, err := newIndexConnection(newIndexParameters{ host: in.Host, namespace: in.Namespace, - sourceTag: c.sourceTag, + sourceTag: c.baseParams.SourceTag, additionalMetadata: in.AdditionalMetadata, + dbDataClient: dbDataClient, }, dialOpts...) if err != nil { return nil, err @@ -310,7 +324,17 @@ func (c *Client) Index(in NewIndexConnParams, dialOpts ...grpc.DialOption) (*Ind return idx, nil } -// ListIndexes retrieves a list of all Indexes in a Pinecone [project]. +func ensureHostHasHttps(host string) string { + if strings.HasPrefix(host, "http://") { + return strings.Replace(host, "http://", "https://", 1) + } else if !strings.HasPrefix(host, "https://") { + return "https://" + host + } + + return host +} + +// [Client.ListIndexes] retrieves a list of all Indexes in a Pinecone [project]. 
// // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request @@ -354,7 +378,7 @@ func (c *Client) ListIndexes(ctx context.Context) ([]*Index, error) { return nil, handleErrorResponseBody(res, "failed to list indexes: ") } - var indexList control.IndexList + var indexList db_control.IndexList err = json.NewDecoder(res.Body).Decode(&indexList) if err != nil { return nil, err @@ -368,23 +392,23 @@ func (c *Client) ListIndexes(ctx context.Context) ([]*Index, error) { return indexes, nil } -// CreatePodIndexRequest holds the parameters for creating a new pods-based Index. +// [CreatePodIndexRequest] holds the parameters for creating a new pods-based Index. // // Fields: -// - Name: (Required) The name of the Index. Resource name must be 1-45 characters long, +// - Name: (Required) The name of the [Index]. Resource name must be 1-45 characters long, // start and end with an alphanumeric character, // and consist only of lower case alphanumeric characters or '-'. // - Dimension: (Required) The [dimensionality] of the vectors to be inserted in the Index. // - Metric: (Required) The distance metric to be used for [similarity] search. You can use // 'euclidean', 'cosine', or 'dotproduct'. // - Environment: (Required) The [cloud environment] where the Index will be hosted. -// - PodType: (Required) The [type of pod] to use for the Index. One of `s1`, `p1`, or `p2` appended with `.` and +// - PodType: (Required) The [type of pod] to use for the [Index]. One of `s1`, `p1`, or `p2` appended with `.` and // one of `x1`, `x2`, `x4`, or `x8`. // - Shards: (Optional) The number of shards to use for the Index (defaults to 1). // Shards split your data across multiple pods, so you can fit more data into an Index. // - Replicas: (Optional) The number of [replicas] to use for the Index (defaults to 1). Replicas duplicate your Index. // They provide higher availability and throughput. 
Replicas can be scaled up or down as your needs change. -// - SourceCollection: (Optional) The name of the Collection to be used as the source for the Index. +// - SourceCollection: (Optional) The name of the [Collection] to be used as the source for the Index. // - MetadataConfig: (Optional) The [metadata configuration] for the behavior of Pinecone's internal metadata Index. By // default, all metadata is indexed; when `metadata_config` is present, // only specified metadata fields are indexed. These configurations are @@ -392,7 +416,7 @@ func (c *Client) ListIndexes(ctx context.Context) ([]*Index, error) { // - DeletionProtection: (Optional) determines whether [deletion protection] is "enabled" or "disabled" for the index. // When "enabled", the index cannot be deleted. Defaults to "disabled". // -// To create a new pods-based Index, use the CreatePodIndex method on the Client object. +// To create a new pods-based Index, use the [Client.CreatePodIndex] method. // // Example: // @@ -451,31 +475,31 @@ type CreatePodIndexRequest struct { MetadataConfig *PodSpecMetadataConfig } -// ReplicaCount ensures the replica count of a pods-based Index is >1. -// It returns a pointer to the number of replicas on a CreatePodIndexRequest object. +// [CreatePodIndexRequest.ReplicaCount] ensures the replica count of a pods-based Index is >1. +// It returns a pointer to the number of replicas on a [CreatePodIndexRequest] object. func (req CreatePodIndexRequest) ReplicaCount() int32 { return minOne(req.Replicas) } -// ShardCount ensures the number of shards on a pods-based Index is >1. It returns a pointer to the number of shards on -// a CreatePodIndexRequest object. +// [CreatePodIndexRequest.ShardCount] ensures the number of shards on a pods-based Index is >1. It returns a pointer to the number of shards on +// a [CreatePodIndexRequest] object. 
func (req CreatePodIndexRequest) ShardCount() int32 { return minOne(req.Shards) } -// TotalCount calculates and returns the total number of pods (replicas*shards) on a CreatePodIndexRequest object. +// [CreatePodIndexRequest.TotalCount] calculates and returns the total number of pods (replicas*shards) on a [CreatePodIndexRequest] object. func (req CreatePodIndexRequest) TotalCount() int { return int(req.ReplicaCount() * req.ShardCount()) } -// CreatePodIndex creates and initializes a new pods-based Index via the specified Client. +// [Client.CreatePodIndex] creates and initializes a new pods-based Index via the specified [Client]. // // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request // to be canceled or to timeout according to the context's deadline. -// - in: A pointer to a CreatePodIndexRequest object. See CreatePodIndexRequest for more information. +// - in: A pointer to a [CreatePodIndexRequest] object. See [CreatePodIndexRequest] for more information. // -// Returns a pointer to an Index object or an error. +// Returns a pointer to an [Index] object or an error. 
// // Example: // @@ -518,23 +542,26 @@ func (c *Client) CreatePodIndex(ctx context.Context, in *CreatePodIndexRequest) return nil, fmt.Errorf("fields Name, Dimension, Metric, Environment, and Podtype must be included in CreatePodIndexRequest") } - deletionProtection := pointerOrNil(control.DeletionProtection(in.DeletionProtection)) - metric := pointerOrNil(control.CreateIndexRequestMetric(in.Metric)) + deletionProtection := pointerOrNil(db_control.DeletionProtection(in.DeletionProtection)) + metric := pointerOrNil(db_control.CreateIndexRequestMetric(in.Metric)) + pods := in.TotalCount() + replicas := in.ReplicaCount() + shards := in.ShardCount() - req := control.CreateIndexRequest{ + req := db_control.CreateIndexRequest{ Name: in.Name, Dimension: in.Dimension, Metric: metric, DeletionProtection: deletionProtection, } - req.Spec = control.IndexSpec{ - Pod: &control.PodSpec{ + req.Spec = db_control.IndexSpec{ + Pod: &db_control.PodSpec{ Environment: in.Environment, PodType: in.PodType, - Pods: in.TotalCount(), - Replicas: in.ReplicaCount(), - Shards: in.ShardCount(), + Pods: &pods, + Replicas: &replicas, + Shards: &shards, SourceCollection: in.SourceCollection, }, } @@ -560,21 +587,21 @@ func (c *Client) CreatePodIndex(ctx context.Context, in *CreatePodIndexRequest) return decodeIndex(res.Body) } -// CreateServerlessIndexRequest holds the parameters for creating a new [Serverless] Index. +// [CreateServerlessIndexRequest] holds the parameters for creating a new [Serverless] Index. // // Fields: -// - Name: (Required) The name of the Index. Resource name must be 1-45 characters long, +// - Name: (Required) The name of the [Index]. Resource name must be 1-45 characters long, // start and end with an alphanumeric character, // and consist only of lower case alphanumeric characters or '-'. -// - Dimension: (Required) The [dimensionality] of the vectors to be inserted in the Index. 
+// - Dimension: (Required) The [dimensionality] of the vectors to be inserted in the [Index]. // - Metric: (Required) The metric used to measure the [similarity] between vectors ('euclidean', 'cosine', or 'dotproduct'). -// - Cloud: (Required) The public [cloud provider] where you would like your Index hosted. -// For serverless Indexes, you define only the cloud and region where the Index should be hosted. -// - Region: (Required) The [region] where you would like your Index to be created. +// - Cloud: (Required) The public [cloud provider] where you would like your [Index] hosted. +// For serverless Indexes, you define only the cloud and region where the [Index] should be hosted. +// - Region: (Required) The [region] where you would like your [Index] to be created. // - DeletionProtection: (Optional) Determines whether [deletion protection] is "enabled" or "disabled" for the index. // When "enabled", the index cannot be deleted. Defaults to "disabled". // -// To create a new Serverless Index, use the CreateServerlessIndex method on the Client object. +// To create a new Serverless Index, use the [Client.CreateServerlessIndex] method. // // Example: // @@ -623,14 +650,14 @@ type CreateServerlessIndexRequest struct { Region string } -// CreateServerlessIndex creates and initializes a new serverless Index via the specified Client. +// [Client.CreateServerlessIndex] creates and initializes a new serverless Index via the specified [Client]. // // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request // to be canceled or to timeout according to the context's deadline. -// - in: A pointer to a CreateServerlessIndexRequest object. See CreateServerlessIndexRequest for more information. +// - in: A pointer to a [CreateServerlessIndexRequest] object. See [CreateServerlessIndexRequest] for more information. // -// Returns a pointer to an Index object or an error. +// Returns a pointer to an [Index] object or an error. 
// // Example: // @@ -668,17 +695,17 @@ func (c *Client) CreateServerlessIndex(ctx context.Context, in *CreateServerless return nil, fmt.Errorf("fields Name, Dimension, Metric, Cloud, and Region must be included in CreateServerlessIndexRequest") } - deletionProtection := pointerOrNil(control.DeletionProtection(in.DeletionProtection)) - metric := pointerOrNil(control.CreateIndexRequestMetric(in.Metric)) + deletionProtection := pointerOrNil(db_control.DeletionProtection(in.DeletionProtection)) + metric := pointerOrNil(db_control.CreateIndexRequestMetric(in.Metric)) - req := control.CreateIndexRequest{ + req := db_control.CreateIndexRequest{ Name: in.Name, Dimension: in.Dimension, Metric: metric, DeletionProtection: deletionProtection, - Spec: control.IndexSpec{ - Serverless: &control.ServerlessSpec{ - Cloud: control.ServerlessSpecCloud(in.Cloud), + Spec: db_control.IndexSpec{ + Serverless: &db_control.ServerlessSpec{ + Cloud: db_control.ServerlessSpecCloud(in.Cloud), Region: in.Region, }, }, @@ -697,14 +724,14 @@ func (c *Client) CreateServerlessIndex(ctx context.Context, in *CreateServerless return decodeIndex(res.Body) } -// DescribeIndex retrieves information about a specific Index. See Index for more information. +// [Client.DescribeIndex] retrieves information about a specific [Index]. See [Index] for more information. // // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request // to be canceled or to timeout according to the context's deadline. -// - idxName: The name of the Index to describe. +// - idxName: The name of the [Index] to describe. // -// Returns a pointer to an Index object or an error. +// Returns a pointer to an [Index] object or an error. // // Example: // @@ -749,12 +776,12 @@ func (c *Client) DescribeIndex(ctx context.Context, idxName string) (*Index, err return decodeIndex(res.Body) } -// DeleteIndex deletes a specific Index. +// [Client.DeleteIndex] deletes a specific [Index]. 
// // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request // to be canceled or to timeout according to the context's deadline. -// - idxName: The name of the Index to delete. +// - idxName: The name of the [Index] to delete. // // Returns an error if the deletion fails. // @@ -796,8 +823,8 @@ func (c *Client) DeleteIndex(ctx context.Context, idxName string) error { return nil } -// ConfigureIndexParams contains parameters for configuring an index. For both pod-based -// and serverless indexes you can configure the DeletionProtection status for an index. +// [ConfigureIndexParams] contains parameters for configuring an [Index]. For both pod-based +// and serverless indexes you can configure the DeletionProtection status for an [Index]. // For pod-based indexes you can also configure the number of Replicas and the PodType. // Each of the fields is optional, but at least one field must be set. // See [scale a pods-based index] for more information. @@ -839,19 +866,19 @@ type ConfigureIndexParams struct { DeletionProtection DeletionProtection } -// ConfigureIndex is used to [scale a pods-based index] up or down by changing the size of the pods or the number of -// replicas, or to enable and disable deletion protection for an index. +// [Client.ConfigureIndex] is used to [scale a pods-based index] up or down by changing the size of the pods or the number of +// replicas, or to enable and disable deletion protection for an [Index]. // // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request // to be canceled or to timeout according to the context's deadline. -// - name: The name of the index to configure. -// - in: A pointer to a ConfigureIndexParams object that contains the parameters for configuring the index. +// - name: The name of the [Index] to configure. 
+// - in: A pointer to a ConfigureIndexParams object that contains the parameters for configuring the [Index]. // -// Note: You can only scale an index up, not down. If you want to scale an index down, +// Note: You can only scale an [Index] up, not down. If you want to scale an [Index] down, // you must create a new index with the desired configuration. // -// Returns a pointer to a configured Index object or an error. +// Returns a pointer to a configured [Index] object or an error. // // Example: // @@ -889,7 +916,7 @@ func (c *Client) ConfigureIndex(ctx context.Context, name string, in ConfigureIn replicas := pointerOrNil(in.Replicas) deletionProtection := pointerOrNil(in.DeletionProtection) - var request control.ConfigureIndexRequest + var request db_control.ConfigureIndexRequest if podType != nil || replicas != nil { request.Spec = &struct { @@ -907,7 +934,7 @@ func (c *Client) ConfigureIndex(ctx context.Context, name string, in ConfigureIn }, } } - request.DeletionProtection = (*control.DeletionProtection)(deletionProtection) + request.DeletionProtection = (*db_control.DeletionProtection)(deletionProtection) res, err := c.restClient.ConfigureIndex(ctx, name, request) if err != nil { @@ -923,7 +950,7 @@ func (c *Client) ConfigureIndex(ctx context.Context, name string, in ConfigureIn return decodeIndex(res.Body) } -// ListCollections retrieves a list of all Collections in a Pinecone [project]. See Collection for more information. +// [Client.ListCollections] retrieves a list of all Collections in a Pinecone [project]. See [understanding collections] for more information. 
// // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request @@ -964,7 +991,7 @@ func (c *Client) ConfigureIndex(ctx context.Context, name string, in ConfigureIn // } // // [project]: https://docs.pinecone.io/guides/projects/understanding-projects -// [Collection]: https://docs.pinecone.io/guides/indexes/understanding-collections +// [understanding collections]: https://docs.pinecone.io/guides/indexes/understanding-collections func (c *Client) ListCollections(ctx context.Context) ([]*Collection, error) { res, err := c.restClient.ListCollections(ctx) if err != nil { @@ -976,7 +1003,7 @@ func (c *Client) ListCollections(ctx context.Context) ([]*Collection, error) { return nil, handleErrorResponseBody(res, "failed to list collections: ") } - var collectionsResponse control.CollectionList + var collectionsResponse db_control.CollectionList if err := json.NewDecoder(res.Body).Decode(&collectionsResponse); err != nil { return nil, err } @@ -989,24 +1016,25 @@ func (c *Client) ListCollections(ctx context.Context) ([]*Collection, error) { return collections, nil } -// DescribeCollection retrieves information about a specific [Collection]. +// [Client.DescribeCollection] retrieves information about a specific [Collection]. See [understanding collections] +// for more information. // // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request // to be canceled or to timeout according to the context's deadline. -// - collectionName: The name of the Collection to describe. +// - collectionName: The name of the [Collection] to describe. // -// Returns a pointer to a Collection object or an error. +// Returns a pointer to a [Collection] object or an error. // // Note: Collections are only available for pods-based Indexes. // -// Since the returned value is a pointer to a Collection object, it will have the following fields: -// - Name: The name of the Collection. 
-// - Size: The size of the Collection in bytes. -// - Status: The status of the Collection. -// - Dimension: The [dimensionality] of the vectors stored in each record held in the Collection. -// - VectorCount: The number of records stored in the Collection. -// - Environment: The cloud environment where the Collection is hosted. +// Since the returned value is a pointer to a [Collection] object, it will have the following fields: +// - Name: The name of the [Collection]. +// - Size: The size of the [Collection] in bytes. +// - Status: The status of the [Collection]. +// - Dimension: The [dimensionality] of the vectors stored in each record held in the [Collection]. +// - VectorCount: The number of records stored in the [Collection]. +// - Environment: The cloud environment where the [Collection] is hosted. // // Example: // @@ -1032,7 +1060,7 @@ func (c *Client) ListCollections(ctx context.Context) ([]*Collection, error) { // } // // [dimensionality]: https://docs.pinecone.io/guides/indexes/choose-a-pod-type-and-size#dimensionality-of-vectors -// [Collection]: https://docs.pinecone.io/guides/indexes/understanding-collections +// [understanding collections]: https://docs.pinecone.io/guides/indexes/understanding-collections func (c *Client) DescribeCollection(ctx context.Context, collectionName string) (*Collection, error) { res, err := c.restClient.DescribeCollection(ctx, collectionName) if err != nil { @@ -1047,13 +1075,13 @@ func (c *Client) DescribeCollection(ctx context.Context, collectionName string) return decodeCollection(res.Body) } -// CreateCollectionRequest holds the parameters for creating a new [Collection]. +// [CreateCollectionRequest] holds the parameters for creating a new [Collection]. // // Fields: -// - Name: (Required) The name of the Collection. -// - Source: (Required) The name of the Index to be used as the source for the Collection. +// - Name: (Required) The name of the [Collection]. 
+// - Source: (Required) The name of the Index to be used as the source for the [Collection]. // -// To create a new Collection, use the CreateCollection method on the Client object. +// To create a new [Collection], use the [Client.CreateCollection] method. // // Note: Collections are only available for pods-based Indexes. // @@ -1082,23 +1110,21 @@ func (c *Client) DescribeCollection(ctx context.Context, collectionName string) // } else { // fmt.Printf("Successfully created collection \"%s\".", collection.Name) // } -// -// [Collection]: https://docs.pinecone.io/guides/indexes/understanding-collections type CreateCollectionRequest struct { Name string Source string } -// CreateCollection creates and initializes a new [Collection] via the specified Client. +// [Client.CreateCollection] creates and initializes a new [Collection] via the specified [Client]. // // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request // to be canceled or to timeout according to the context's deadline. -// - in: A pointer to a CreateCollectionRequest object. +// - in: A pointer to a [CreateCollectionRequest] object. // // Note: Collections are only available for pods-based Indexes. // -// Returns a pointer to a Collection object or an error. +// Returns a pointer to a [Collection] object or an error. 
// // Example: // @@ -1125,14 +1151,12 @@ type CreateCollectionRequest struct { // } else { // fmt.Printf("Successfully created collection \"%s\".", collection.Name) // } -// -// [Collection]: https://docs.pinecone.io/guides/indexes/understanding-collections func (c *Client) CreateCollection(ctx context.Context, in *CreateCollectionRequest) (*Collection, error) { if in.Source == "" || in.Name == "" { return nil, fmt.Errorf("fields Name and Source must be included in CreateCollectionRequest") } - req := control.CreateCollectionRequest{ + req := db_control.CreateCollectionRequest{ Name: in.Name, Source: in.Source, } @@ -1150,12 +1174,12 @@ func (c *Client) CreateCollection(ctx context.Context, in *CreateCollectionReque return decodeCollection(res.Body) } -// DeleteCollection deletes a specific [Collection] +// [Client.DeleteCollection] deletes a specific [Collection] // // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request // to be canceled or to timeout according to the context's deadline. -// - collectionName: The name of the Collection to delete. +// - collectionName: The name of the [Collection] to delete. // // Note: Collections are only available for pods-based Indexes. // @@ -1185,8 +1209,6 @@ func (c *Client) CreateCollection(ctx context.Context, in *CreateCollectionReque // } else { // log.Printf("Successfully deleted collection \"%s\"\n", collectionName) // } -// -// [Collection]: https://docs.pinecone.io/guides/indexes/understanding-collections func (c *Client) DeleteCollection(ctx context.Context, collectionName string) error { res, err := c.restClient.DeleteCollection(ctx, collectionName) if err != nil { @@ -1201,7 +1223,15 @@ func (c *Client) DeleteCollection(ctx context.Context, collectionName string) er return nil } -// EmbedRequest holds the parameters for generating embeddings for a list of input strings. 
+// [InferenceService] is a struct which exposes methods for interacting with the Pinecone Inference API. [InferenceService] +// can be accessed via the Client object through the Client.Inference namespace. +// +// [Pinecone Inference API]: https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models +type InferenceService struct { + client *inference.Client +} + +// [EmbedRequest] holds the parameters for generating embeddings for a list of input strings. // // Fields: // - Model: (Required) The model to use for generating embeddings. @@ -1213,7 +1243,7 @@ type EmbedRequest struct { Parameters EmbedParameters } -// EmbedParameters contains model-specific parameters that can be used for generating embeddings. +// [EmbedParameters] contains model-specific parameters that can be used for generating embeddings. // // Fields: // - InputType: (Optional) A common property used to distinguish between different types of data. For example, "passage", or "query". @@ -1224,23 +1254,31 @@ type EmbedParameters struct { Truncate string } -// InferenceService is a struct which exposes methods for interacting with the Pinecone Inference API. InferenceService -// can be accessed via the Client object through the Client.Inference namespace. +// [EmbedResponse] holds the embeddings generated for a single input. // -// [Pinecone Inference API]: https://docs.pinecone.io/guides/inference/understanding-inference#embedding-models -type InferenceService struct { - client *control.Client +// Fields: +// - Data: A list of [Embedding] objects containing the embeddings generated for the input. +// - Model: The model used to generate the embeddings. +// - Usage: Usage statistics ([Total Tokens]) for the request.
+// +// [Total Tokens]: https://docs.pinecone.io/guides/organizations/manage-cost/understanding-cost#embed +type EmbedResponse struct { + Data []Embedding `json:"data"` + Model string `json:"model"` + Usage struct { + TotalTokens *int `json:"total_tokens,omitempty"` + } `json:"usage"` } -// Embed generates embeddings for a list of inputs using the specified model and (optional) parameters. +// [InferenceService.Embed] generates embeddings for a list of inputs using the specified model and (optional) parameters. // // Parameters: // - ctx: A context.Context object controls the request's lifetime, allowing for the request // to be canceled or to timeout according to the context's deadline. -// - in: A pointer to an EmbedRequest object that contains the model t4o use for embedding generation, the +// - in: A pointer to an EmbedRequest object that contains the model to use for embedding generation, the // list of input strings to generate embeddings for, and any additional parameters to use for generation. // -// Returns a pointer to an EmbeddingsList object or an error. +// Returns a pointer to an [EmbeddingsList] object or an error. 
// // Example: // @@ -1274,7 +1312,7 @@ type InferenceService struct { // } else { // fmt.Printf("Successfull generated embeddings: %+v", res) // } -func (i *InferenceService) Embed(ctx context.Context, in *EmbedRequest) (*control.EmbeddingsList, error) { +func (i *InferenceService) Embed(ctx context.Context, in *EmbedRequest) (*EmbedResponse, error) { if len(in.TextInputs) == 0 { return nil, fmt.Errorf("TextInputs must contain at least one value") @@ -1290,7 +1328,7 @@ func (i *InferenceService) Embed(ctx context.Context, in *EmbedRequest) (*contro }{Text: &input} } - req := control.EmbedRequest{ + req := inference.EmbedRequest{ Model: in.Model, Inputs: convertedInputs, } @@ -1319,6 +1357,130 @@ func (i *InferenceService) Embed(ctx context.Context, in *EmbedRequest) (*contro return decodeEmbeddingsList(res.Body) } +// [Document] is a map representing the document to be reranked. +type Document map[string]string + +// [RerankRequest] holds the parameters for calling [InferenceService.Rerank] and reranking documents +// by a specified query and model. +// +// Fields: +// - Model: (Required) The [model] to use for reranking. +// - Query: (Required) The query to rerank Documents against. +// - Documents: (Required) A list of Document objects to be reranked. The default is "text", but you can +// specify this behavior with [RerankRequest.RankFields]. +// - RankFields: (Optional) The fields to rank the Documents by. If not provided, the default is "text". +// - ReturnDocuments: (Optional) Whether to include Documents in the response. Defaults to true. +// - TopN: (Optional) How many Documents to return. Defaults to the length of input Documents.
+// - Parameters: (Optional) Additional model-specific parameters for the reranker +// +// [model]: https://docs.pinecone.io/guides/inference/understanding-inference#models +type RerankRequest struct { + Model string + Query string + Documents []Document + RankFields *[]string + ReturnDocuments *bool + TopN *int + Parameters *map[string]string +} + +// Represents a ranked document with a relevance score and an index position. +// +// Fields: +// - Document: The [Document]. +// - Index: The index position of the Document from the original request. This can be used +// to locate the position of the document relative to others described in the request. +// - Score: The relevance score of the Document indicating how closely it matches the query. +type RankedDocument struct { + Document *Document `json:"document,omitempty"` + Index int `json:"index"` + Score float32 `json:"score"` +} + +// [RerankResponse] is the result of a reranking operation. +// +// Fields: +// - Data: A list of [RankedDocument] objects which have been reranked. The RankedDocuments are sorted in order of relevance, +// with the first being the most relevant. +// - Model: The model used to rerank documents. +// - Usage: Usage statistics ([Rerank Units]) for the reranking operation. +// +// [Rerank Units]: https://docs.pinecone.io/guides/organizations/manage-cost/understanding-cost#rerank +type RerankResponse struct { + Data []RankedDocument `json:"data,omitempty"` + Model string `json:"model"` + Usage RerankUsage `json:"usage"` +} + +// [InferenceService.Rerank] reranks documents with associated relevance scores that represent the relevance of each [Document] +// to the provided query using the specified model. +// +// Parameters: +// - ctx: A context.Context object controls the request's lifetime, allowing for the request +// to be canceled or to timeout according to the context's deadline.
+// - in: A pointer to a [RerankRequest] object that contains the model, query, and documents to use for reranking. +// +// Example: +// +// ctx := context.Background() +// +// clientParams := pinecone.NewClientParams{ +// ApiKey: "YOUR_API_KEY", +// SourceTag: "your_source_identifier", // optional +// } +// +// pc, err := pinecone.NewClient(clientParams) +// if err != nil { +// log.Fatalf("Failed to create Client: %v", err) +// } +// +// rerankModel := "bge-reranker-v2-m3" +// topN := 2 +// returnDocuments := true +// documents := []pinecone.Document{ +// {"id": "doc1", "text": "Apple is a popular fruit known for its sweetness and crisp texture."}, +// {"id": "doc2", "text": "Many people enjoy eating apples as a healthy snack."}, +// {"id": "doc3", "text": "Apple Inc. has revolutionized the tech industry with its sleek designs and user-friendly interfaces."}, +// {"id": "doc4", "text": "An apple a day keeps the doctor away, as the saying goes."}, +// } +// +// ranking, err := pc.Inference.Rerank(ctx, &pinecone.RerankRequest{ +// Model: rerankModel, +// Query: "i love to eat apples", +// ReturnDocuments: &returnDocuments, +// TopN: &topN, +// RankFields: &[]string{"text"}, +// Documents: documents, +// }) +// if err != nil { +// log.Fatalf("Failed to rerank: %v", err) +// } +// fmt.Printf("Rerank result: %+v\n", ranking) +func (i *InferenceService) Rerank(ctx context.Context, in *RerankRequest) (*RerankResponse, error) { + convertedDocuments := make([]inference.Document, len(in.Documents)) + for i, doc := range in.Documents { + convertedDocuments[i] = inference.Document(doc) + } + req := inference.RerankJSONRequestBody{ + Model: in.Model, + Query: in.Query, + Documents: convertedDocuments, + RankFields: in.RankFields, + ReturnDocuments: in.ReturnDocuments, + TopN: in.TopN, + Parameters: in.Parameters, + } + res, err := i.client.Rerank(ctx, req) + if err != nil { + return nil, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return nil,
handleErrorResponseBody(res, "failed to rerank: ") + } + return decodeRerankResponse(res.Body) +} + func (c *Client) extractAuthHeader() map[string]string { possibleAuthKeys := []string{ "api-key", @@ -1326,7 +1488,7 @@ func (c *Client) extractAuthHeader() map[string]string { "access_token", } - for key, value := range c.headers { + for key, value := range c.baseParams.Headers { for _, checkKey := range possibleAuthKeys { if strings.ToLower(key) == checkKey { return map[string]string{key: value} @@ -1337,7 +1499,7 @@ func (c *Client) extractAuthHeader() map[string]string { return nil } -func toIndex(idx *control.IndexModel) *Index { +func toIndex(idx *db_control.IndexModel) *Index { if idx == nil { return nil } @@ -1347,9 +1509,9 @@ func toIndex(idx *control.IndexModel) *Index { spec.Pod = &PodSpec{ Environment: idx.Spec.Pod.Environment, PodType: idx.Spec.Pod.PodType, - PodCount: int32(idx.Spec.Pod.Pods), - Replicas: idx.Spec.Pod.Replicas, - ShardCount: idx.Spec.Pod.Shards, + PodCount: derefOrDefault(idx.Spec.Pod.Pods, 1), + Replicas: derefOrDefault(idx.Spec.Pod.Replicas, 1), + ShardCount: derefOrDefault(idx.Spec.Pod.Shards, 1), SourceCollection: idx.Spec.Pod.SourceCollection, } if idx.Spec.Pod.MetadataConfig != nil { @@ -1380,7 +1542,7 @@ func toIndex(idx *control.IndexModel) *Index { } func decodeIndex(resBody io.ReadCloser) (*Index, error) { - var idx control.IndexModel + var idx db_control.IndexModel err := json.NewDecoder(resBody).Decode(&idx) if err != nil { return nil, fmt.Errorf("failed to decode idx response: %w", err) @@ -1389,8 +1551,8 @@ func decodeIndex(resBody io.ReadCloser) (*Index, error) { return toIndex(&idx), nil } -func decodeEmbeddingsList(resBody io.ReadCloser) (*control.EmbeddingsList, error) { - var embeddingsList control.EmbeddingsList +func decodeEmbeddingsList(resBody io.ReadCloser) (*EmbedResponse, error) { + var embeddingsList EmbedResponse err := json.NewDecoder(resBody).Decode(&embeddingsList) if err != nil { return nil, 
fmt.Errorf("failed to decode embeddings response: %w", err) @@ -1399,7 +1561,17 @@ func decodeEmbeddingsList(resBody io.ReadCloser) (*control.EmbeddingsList, error return &embeddingsList, nil } -func toCollection(cm *control.CollectionModel) *Collection { +func decodeRerankResponse(resBody io.ReadCloser) (*RerankResponse, error) { + var rerankResponse RerankResponse + err := json.NewDecoder(resBody).Decode(&rerankResponse) + if err != nil { + return nil, fmt.Errorf("failed to decode rerank response: %w", err) + } + + return &rerankResponse, nil +} + +func toCollection(cm *db_control.CollectionModel) *Collection { if cm == nil { return nil } @@ -1415,7 +1587,7 @@ func toCollection(cm *control.CollectionModel) *Collection { } func decodeCollection(resBody io.ReadCloser) (*Collection, error) { - var collectionModel control.CollectionModel + var collectionModel db_control.CollectionModel err := json.NewDecoder(resBody).Decode(&collectionModel) if err != nil { return nil, fmt.Errorf("failed to decode collection response: %w", err) @@ -1424,8 +1596,8 @@ func decodeCollection(resBody io.ReadCloser) (*Collection, error) { return toCollection(&collectionModel), nil } -func decodeErrorResponse(resBodyBytes []byte) (*control.ErrorResponse, error) { - var errorResponse control.ErrorResponse +func decodeErrorResponse(resBodyBytes []byte) (*db_control.ErrorResponse, error) { + var errorResponse db_control.ErrorResponse err := json.Unmarshal(resBodyBytes, &errorResponse) if err != nil { return nil, fmt.Errorf("failed to decode error response: %w", err) @@ -1487,16 +1659,61 @@ func formatError(errMap errorResponseMap) error { return &PineconeError{Code: errMap.StatusCode, Msg: baseError} } -func buildClientBaseOptions(in NewClientBaseParams) []control.ClientOption { - clientOptions := []control.ClientOption{} +func buildClientBaseOptions(in NewClientBaseParams) []db_control.ClientOption { + clientOptions := []db_control.ClientOption{} + headerProviders := 
buildSharedProviderHeaders(in) - // build and apply user agent header - userAgentProvider := provider.NewHeaderProvider("User-Agent", useragent.BuildUserAgent(in.SourceTag)) - clientOptions = append(clientOptions, control.WithRequestEditorFn(userAgentProvider.Intercept)) + for _, provider := range headerProviders { + clientOptions = append(clientOptions, db_control.WithRequestEditorFn(provider.Intercept)) + } + // apply custom http client if provided + if in.RestClient != nil { + clientOptions = append(clientOptions, db_control.WithHTTPClient(in.RestClient)) + } + + return clientOptions +} + +func buildInferenceBaseOptions(in NewClientBaseParams) []inference.ClientOption { + clientOptions := []inference.ClientOption{} + headerProviders := buildSharedProviderHeaders(in) + + for _, provider := range headerProviders { + clientOptions = append(clientOptions, inference.WithRequestEditorFn(provider.Intercept)) + } + + // apply custom http client if provided + if in.RestClient != nil { + clientOptions = append(clientOptions, inference.WithHTTPClient(in.RestClient)) + } + + return clientOptions +} + +func buildDataClientBaseOptions(in NewClientBaseParams) []db_data_rest.ClientOption { + clientOptions := []db_data_rest.ClientOption{} + headerProviders := buildSharedProviderHeaders(in) + + for _, provider := range headerProviders { + clientOptions = append(clientOptions, db_data_rest.WithRequestEditorFn(provider.Intercept)) + } + + // apply custom http client if provided + if in.RestClient != nil { + clientOptions = append(clientOptions, db_data_rest.WithHTTPClient(in.RestClient)) + } + + return clientOptions +} + +func buildSharedProviderHeaders(in NewClientBaseParams) []*provider.CustomHeader { + providers := []*provider.CustomHeader{} + + // build and apply user agent header + providers = append(providers, provider.NewHeaderProvider("User-Agent", useragent.BuildUserAgent(in.SourceTag))) // build and apply api version header - apiVersionProvider := 
provider.NewHeaderProvider("X-Pinecone-Api-Version", gen.PineconeApiVersion) - clientOptions = append(clientOptions, control.WithRequestEditorFn(apiVersionProvider.Intercept)) + providers = append(providers, provider.NewHeaderProvider("X-Pinecone-Api-Version", gen.PineconeApiVersion)) // get headers from environment envAdditionalHeaders, hasEnvAdditionalHeaders := os.LookupEnv("PINECONE_ADDITIONAL_HEADERS") @@ -1507,26 +1724,18 @@ func buildClientBaseOptions(in NewClientBaseParams) []control.ClientOption { log.Printf("failed to parse PINECONE_ADDITIONAL_HEADERS: %v", err) } } - // merge headers from parameters if passed with additionalHeaders from environment if in.Headers != nil { for key, value := range in.Headers { additionalHeaders[key] = value } } - - // add headers to client options + // create header providers for key, value := range additionalHeaders { - headerProvider := provider.NewHeaderProvider(key, value) - clientOptions = append(clientOptions, control.WithRequestEditorFn(headerProvider.Intercept)) - } - - // apply custom http client if provided - if in.RestClient != nil { - clientOptions = append(clientOptions, control.WithHTTPClient(in.RestClient)) + providers = append(providers, provider.NewHeaderProvider(key, value)) } - return clientOptions + return providers } func ensureURLScheme(inputURL string) (string, error) { diff --git a/pinecone/client_test.go b/pinecone/client_test.go index de27d83..d8ba61e 100644 --- a/pinecone/client_test.go +++ b/pinecone/client_test.go @@ -13,11 +13,11 @@ import ( "time" "github.com/google/go-cmp/cmp" + "github.com/google/uuid" "github.com/pinecone-io/go-pinecone/internal/gen" - "github.com/pinecone-io/go-pinecone/internal/gen/control" + "github.com/pinecone-io/go-pinecone/internal/gen/db_control" "github.com/pinecone-io/go-pinecone/internal/provider" - "github.com/google/uuid" "github.com/pinecone-io/go-pinecone/internal/utils" "github.com/stretchr/testify/assert" @@ -277,6 +277,11 @@ func (ts *IntegrationTests) 
TestConfigureIndexHitPodLimit() { } func (ts *IntegrationTests) TestGenerateEmbeddings() { + // Run Embed tests once rather than duplicating across serverless & pods + if ts.indexType == "pod" { + ts.T().Skip("Skipping Embed tests for pods") + } + ctx := context.Background() embeddingModel := "multilingual-e5-large" embeddings, err := ts.client.Inference.Embed(ctx, &EmbedRequest{ @@ -293,9 +298,9 @@ func (ts *IntegrationTests) TestGenerateEmbeddings() { require.NoError(ts.T(), err) require.NotNil(ts.T(), embeddings, "Expected embedding to be non-nil") - require.Equal(ts.T(), embeddingModel, *embeddings.Model, "Expected model to be '%s', but got '%s'", embeddingModel, embeddings.Model) - require.Equal(ts.T(), 2, len(*embeddings.Data), "Expected 2 embeddings") - require.Equal(ts.T(), 1024, len(*(*embeddings.Data)[0].Values), "Expected embeddings to have length 1024") + require.Equal(ts.T(), embeddingModel, embeddings.Model, "Expected model to be '%s', but got '%s'", embeddingModel, embeddings.Model) + require.Equal(ts.T(), 2, len(embeddings.Data), "Expected 2 embeddings") + require.Equal(ts.T(), 1024, len(*embeddings.Data[0].Values), "Expected embeddings to have length 1024") } func (ts *IntegrationTests) TestGenerateEmbeddingsInvalidInputs() { @@ -313,6 +318,164 @@ func (ts *IntegrationTests) TestGenerateEmbeddingsInvalidInputs() { require.Contains(ts.T(), err.Error(), "TextInputs must contain at least one value") } +func (ts *IntegrationTests) TestRerankDocumentDefaultField() { + // Run Rerank tests once rather than duplicating across serverless & pods + if ts.indexType == "pod" { + ts.T().Skip("Skipping Rerank tests for pods") + } + + ctx := context.Background() + rerankModel := "bge-reranker-v2-m3" + topN := 2 + retunDocuments := true + ranking, err := ts.client.Inference.Rerank(ctx, &RerankRequest{ + Model: rerankModel, + Query: "i love apples", + ReturnDocuments: &retunDocuments, + TopN: &topN, + Documents: []Document{ + {"id": "vec1", "text": "Apple is a 
popular fruit known for its sweetness and crisp texture."}, + {"id": "vec2", "text": "Many people enjoy eating apples as a healthy snack."}, + {"id": "vec3", "text": "Apple Inc. has revolutionized the tech industry with its sleek designs and user-friendly interfaces."}, + {"id": "vec4", "text": "An apple a day keeps the doctor away, as the saying goes."}, + }}) + + require.NoError(ts.T(), err) + require.NotNil(ts.T(), ranking, "Expected reranking result to be non-nil") + require.Equal(ts.T(), topN, len(ranking.Data), "Expected %v rankings", topN) + + doc := *ranking.Data[0].Document + _, exists := doc["text"] + require.True(ts.T(), exists, "Expected '%s' to exist in Document map", "text") + _, exists = doc["id"] + require.True(ts.T(), exists, "Expected '%s' to exist in Document map", "id") +} + +func (ts *IntegrationTests) TestRerankDocumentCustomField() { + // Run Rerank tests once rather than duplicating across serverless & pods + if ts.indexType == "pod" { + ts.T().Skip("Skipping Rerank tests for pods") + } + + ctx := context.Background() + rerankModel := "bge-reranker-v2-m3" + topN := 2 + retunDocuments := true + ranking, err := ts.client.Inference.Rerank(ctx, &RerankRequest{ + Model: rerankModel, + Query: "i love apples", + ReturnDocuments: &retunDocuments, + TopN: &topN, + RankFields: &[]string{"customField"}, + Documents: []Document{ + {"id": "vec1", "customField": "Apple is a popular fruit known for its sweetness and crisp texture."}, + {"id": "vec2", "customField": "Many people enjoy eating apples as a healthy snack."}, + {"id": "vec3", "customField": "Apple Inc. 
has revolutionized the tech industry with its sleek designs and user-friendly interfaces."}, + {"id": "vec4", "customField": "An apple a day keeps the doctor away, as the saying goes."}, + }}) + + require.NoError(ts.T(), err) + require.NotNil(ts.T(), ranking, "Expected reranking result to be non-nil") + require.Equal(ts.T(), topN, len(ranking.Data), "Expected %v rankings", topN) + + doc := *ranking.Data[0].Document + _, exists := doc["customField"] + require.True(ts.T(), exists, "Expected '%s' to exist in Document map", "customField") + _, exists = doc["id"] + require.True(ts.T(), exists, "Expected '%s' to exist in Document map", "id") +} + +func (ts *IntegrationTests) TestRerankDocumentAllDefaults() { + // Run Rerank tests once rather than duplicating across serverless & pods + if ts.indexType == "pod" { + ts.T().Skip("Skipping Rerank tests for pods") + } + + ctx := context.Background() + rerankModel := "bge-reranker-v2-m3" + ranking, err := ts.client.Inference.Rerank(ctx, &RerankRequest{ + Model: rerankModel, + Query: "i love apples", + Documents: []Document{ + {"id": "vec1", "text": "Apple is a popular fruit known for its sweetness and crisp texture."}, + {"id": "vec2", "text": "Many people enjoy eating apples as a healthy snack."}, + {"id": "vec3", "text": "Apple Inc. 
has revolutionized the tech industry with its sleek designs and user-friendly interfaces."}, + {"id": "vec4", "text": "An apple a day keeps the doctor away, as the saying goes."}, + }}) + + require.NoError(ts.T(), err) + require.NotNil(ts.T(), ranking, "Expected reranking result to be non-nil") + require.Equal(ts.T(), 4, len(ranking.Data), "Expected %v rankings", 4) + + doc := *ranking.Data[0].Document + _, exists := doc["text"] + require.True(ts.T(), exists, "Expected '%s' to exist in Document map", "text") + _, exists = doc["id"] + require.True(ts.T(), exists, "Expected '%s' to exist in Document map", "id") +} + +func (ts *IntegrationTests) TestRerankDocumentsMultipleRankFields() { + // Run Rerank tests once rather than duplicating across serverless & pods + if ts.indexType == "pod" { + ts.T().Skip("Skipping Rerank tests for pods") + } + + ctx := context.Background() + rerankModel := "bge-reranker-v2-m3" + _, err := ts.client.Inference.Rerank(ctx, &RerankRequest{ + Model: rerankModel, + Query: "i love apples", + RankFields: &[]string{"text", "custom-field"}, + Documents: []Document{ + { + "id": "vec1", + "text": "Apple is a popular fruit known for its sweetness and crisp texture.", + "custom-field": "another field", + }, + { + "id": "vec2", + "text": "Many people enjoy eating apples as a healthy snack.", + "custom-field": "another field", + }, + { + "id": "vec3", + "text": "Apple Inc. 
has revolutionized the tech industry with its sleek designs and user-friendly interfaces.", + "custom-field": "another field", + }, + { + "id": "vec4", + "text": "An apple a day keeps the doctor away, as the saying goes.", + "custom-field": "another field", + }, + }}) + + require.Error(ts.T(), err) + require.Contains(ts.T(), err.Error(), "Only one rank field is supported for model") +} + +func (ts *IntegrationTests) TestRerankDocumentFieldError() { + // Run Rerank tests once rather than duplicating across serverless & pods + if ts.indexType == "pod" { + ts.T().Skip("Skipping Rerank tests for pods") + } + + ctx := context.Background() + rerankModel := "bge-reranker-v2-m3" + _, err := ts.client.Inference.Rerank(ctx, &RerankRequest{ + Model: rerankModel, + Query: "i love apples", + RankFields: &[]string{"custom-field"}, + Documents: []Document{ + {"id": "vec1", "text": "Apple is a popular fruit known for its sweetness and crisp texture."}, + {"id": "vec2", "text": "Many people enjoy eating apples as a healthy snack."}, + {"id": "vec3", "text": "Apple Inc. 
has revolutionized the tech industry with its sleek designs and user-friendly interfaces."}, + {"id": "vec4", "text": "An apple a day keeps the doctor away, as the saying goes."}, + }}) + + require.Error(ts.T(), err) + require.Contains(ts.T(), err.Error(), "field 'custom-field' not found in document") +} + // Unit tests: func TestExtractAuthHeaderUnit(t *testing.T) { globalApiKey := os.Getenv("PINECONE_API_KEY") @@ -388,9 +551,9 @@ func TestNewClientParamsSetUnit(t *testing.T) { client, err := NewClient(NewClientParams{ApiKey: apiKey}) require.NoError(t, err) - require.Empty(t, client.sourceTag, "Expected client to have empty sourceTag") - require.NotNil(t, client.headers, "Expected client headers to not be nil") - apiKeyHeader, ok := client.headers["Api-Key"] + require.Empty(t, client.baseParams.SourceTag, "Expected client to have empty sourceTag") + require.NotNil(t, client.baseParams.Headers, "Expected client headers to not be nil") + apiKeyHeader, ok := client.baseParams.Headers["Api-Key"] require.True(t, ok, "Expected client to have an 'Api-Key' header") require.Equal(t, apiKey, apiKeyHeader, "Expected 'Api-Key' header to match provided ApiKey") require.Equal(t, 3, len(client.restClient.RequestEditors), "Expected client to have correct number of request editors") @@ -405,10 +568,10 @@ func TestNewClientParamsSetSourceTagUnit(t *testing.T) { }) require.NoError(t, err) - apiKeyHeader, ok := client.headers["Api-Key"] + apiKeyHeader, ok := client.baseParams.Headers["Api-Key"] require.True(t, ok, "Expected client to have an 'Api-Key' header") require.Equal(t, apiKey, apiKeyHeader, "Expected 'Api-Key' header to match provided ApiKey") - require.Equal(t, sourceTag, client.sourceTag, "Expected client to have sourceTag '%s', but got '%s'", sourceTag, client.sourceTag) + require.Equal(t, sourceTag, client.baseParams.SourceTag, "Expected client to have sourceTag '%s', but got '%s'", sourceTag, client.baseParams.SourceTag) require.Equal(t, 3, 
len(client.restClient.RequestEditors), "Expected client to have %s request editors, but got %s", 2, len(client.restClient.RequestEditors)) } @@ -418,10 +581,10 @@ func TestNewClientParamsSetHeadersUnit(t *testing.T) { client, err := NewClient(NewClientParams{ApiKey: apiKey, Headers: headers}) require.NoError(t, err) - apiKeyHeader, ok := client.headers["Api-Key"] + apiKeyHeader, ok := client.baseParams.Headers["Api-Key"] require.True(t, ok, "Expected client to have an 'Api-Key' header") require.Equal(t, apiKey, apiKeyHeader, "Expected 'Api-Key' header to match provided ApiKey") - require.Equal(t, client.headers, headers, "Expected client to have headers '%+v', but got '%+v'", headers, client.headers) + require.Equal(t, client.baseParams.Headers, headers, "Expected client to have headers '%+v', but got '%+v'", headers, client.baseParams.Headers) require.Equal(t, 4, len(client.restClient.RequestEditors), "Expected client to have %s request editors, but got %s", 3, len(client.restClient.RequestEditors)) } @@ -806,12 +969,15 @@ func TestEnsureURLSchemeUnit(t *testing.T) { } func TestToIndexUnit(t *testing.T) { - deletionProtectionEnabled := control.Enabled - deletionProtectionDisabled := control.Disabled + deletionProtectionEnabled := db_control.Enabled + deletionProtectionDisabled := db_control.Disabled + pods := 1 + replicas := int32(1) + shards := int32(1) tests := []struct { name string - originalInput *control.IndexModel + originalInput *db_control.IndexModel expectedOutput *Index }{ { @@ -821,30 +987,30 @@ func TestToIndexUnit(t *testing.T) { }, { name: "pod index input", - originalInput: &control.IndexModel{ + originalInput: &db_control.IndexModel{ Name: "testIndex", Dimension: 128, Host: "test-host", Metric: "cosine", DeletionProtection: &deletionProtectionDisabled, Spec: struct { - Pod *control.PodSpec `json:"pod,omitempty"` - Serverless *control.ServerlessSpec `json:"serverless,omitempty"` + Pod *db_control.PodSpec `json:"pod,omitempty"` + Serverless 
*db_control.ServerlessSpec `json:"serverless,omitempty"` }(struct { - Pod *control.PodSpec - Serverless *control.ServerlessSpec - }{Pod: &control.PodSpec{ + Pod *db_control.PodSpec + Serverless *db_control.ServerlessSpec + }{Pod: &db_control.PodSpec{ Environment: "test-environ", PodType: "p1.x2", - Pods: 1, - Replicas: 1, - Shards: 1, + Pods: &pods, + Replicas: &replicas, + Shards: &shards, SourceCollection: nil, MetadataConfig: nil, }}), Status: struct { - Ready bool `json:"ready"` - State control.IndexModelStatusState `json:"state"` + Ready bool `json:"ready"` + State db_control.IndexModelStatusState `json:"state"` }{ Ready: true, State: "active", @@ -874,25 +1040,25 @@ func TestToIndexUnit(t *testing.T) { }, { name: "serverless index input", - originalInput: &control.IndexModel{ + originalInput: &db_control.IndexModel{ Name: "testIndex", Dimension: 128, Host: "test-host", Metric: "cosine", DeletionProtection: &deletionProtectionEnabled, Spec: struct { - Pod *control.PodSpec `json:"pod,omitempty"` - Serverless *control.ServerlessSpec `json:"serverless,omitempty"` + Pod *db_control.PodSpec `json:"pod,omitempty"` + Serverless *db_control.ServerlessSpec `json:"serverless,omitempty"` }(struct { - Pod *control.PodSpec - Serverless *control.ServerlessSpec - }{Serverless: &control.ServerlessSpec{ + Pod *db_control.PodSpec + Serverless *db_control.ServerlessSpec + }{Serverless: &db_control.ServerlessSpec{ Cloud: "test-environ", Region: "test-region", }}), Status: struct { - Ready bool `json:"ready"` - State control.IndexModelStatusState `json:"state"` + Ready bool `json:"ready"` + State db_control.IndexModelStatusState `json:"state"` }{ Ready: true, State: "active", @@ -936,7 +1102,7 @@ func TestToCollectionUnit(t *testing.T) { tests := []struct { name string - originalInput *control.CollectionModel + originalInput *db_control.CollectionModel expectedOutput *Collection }{ { @@ -946,7 +1112,7 @@ func TestToCollectionUnit(t *testing.T) { }, { name: "collection input", - 
originalInput: &control.CollectionModel{ + originalInput: &db_control.CollectionModel{ Dimension: &dimension, Name: "testCollection", Environment: "test-environ", @@ -965,7 +1131,7 @@ func TestToCollectionUnit(t *testing.T) { }, { name: "collection input", - originalInput: &control.CollectionModel{ + originalInput: &db_control.CollectionModel{ Dimension: &dimension, Name: "testCollection", Environment: "test-environ", @@ -1072,7 +1238,7 @@ func TestNewClientUnit(t *testing.T) { } else { assert.NoError(t, err) assert.NotNil(t, client) - assert.Equal(t, tc.expectedHeaders, client.headers, "Expected headers to be '%v', but got '%v'", tc.expectedHeaders, client.headers) + assert.Equal(t, tc.expectedHeaders, client.baseParams.Headers, "Expected headers to be '%v', but got '%v'", tc.expectedHeaders, client.baseParams.Headers) } }) } @@ -1149,7 +1315,7 @@ func TestBuildClientBaseOptionsUnit(t *testing.T) { name string params NewClientBaseParams envHeaders string - expect []control.ClientOption + expect []db_control.ClientOption expectEnvUnset bool }{ { @@ -1158,10 +1324,10 @@ func TestBuildClientBaseOptionsUnit(t *testing.T) { SourceTag: "source-tag", Headers: map[string]string{"Param-Header": "param-value"}, }, - expect: []control.ClientOption{ - control.WithRequestEditorFn(provider.NewHeaderProvider("User-Agent", "test-user-agent").Intercept), - control.WithRequestEditorFn(provider.NewHeaderProvider("X-Pinecone-Api-Version", gen.PineconeApiVersion).Intercept), - control.WithRequestEditorFn(provider.NewHeaderProvider("Param-Header", "param-value").Intercept), + expect: []db_control.ClientOption{ + db_control.WithRequestEditorFn(provider.NewHeaderProvider("User-Agent", "test-user-agent").Intercept), + db_control.WithRequestEditorFn(provider.NewHeaderProvider("X-Pinecone-Api-Version", gen.PineconeApiVersion).Intercept), + db_control.WithRequestEditorFn(provider.NewHeaderProvider("Param-Header", "param-value").Intercept), }, expectEnvUnset: true, }, @@ -1172,11 +1338,11 @@ 
func TestBuildClientBaseOptionsUnit(t *testing.T) { Headers: map[string]string{"Param-Header": "param-value"}, }, envHeaders: `{"Env-Header": "env-value"}`, - expect: []control.ClientOption{ - control.WithRequestEditorFn(provider.NewHeaderProvider("Env-Header", "env-value").Intercept), - control.WithRequestEditorFn(provider.NewHeaderProvider("X-Pinecone-Api-Version", gen.PineconeApiVersion).Intercept), - control.WithRequestEditorFn(provider.NewHeaderProvider("User-Agent", "test-user-agent").Intercept), - control.WithRequestEditorFn(provider.NewHeaderProvider("Param-Header", "param-value").Intercept), + expect: []db_control.ClientOption{ + db_control.WithRequestEditorFn(provider.NewHeaderProvider("Env-Header", "env-value").Intercept), + db_control.WithRequestEditorFn(provider.NewHeaderProvider("X-Pinecone-Api-Version", gen.PineconeApiVersion).Intercept), + db_control.WithRequestEditorFn(provider.NewHeaderProvider("User-Agent", "test-user-agent").Intercept), + db_control.WithRequestEditorFn(provider.NewHeaderProvider("Param-Header", "param-value").Intercept), }, }, } diff --git a/pinecone/index_connection.go b/pinecone/index_connection.go index 3fc5202..7a865ae 100644 --- a/pinecone/index_connection.go +++ b/pinecone/index_connection.go @@ -3,12 +3,16 @@ package pinecone import ( "context" "crypto/tls" + "encoding/json" "fmt" + "io" "log" + "net/http" "net/url" "strings" - "github.com/pinecone-io/go-pinecone/internal/gen/data" + db_data_grpc "github.com/pinecone-io/go-pinecone/internal/gen/db_data/grpc" + db_data_rest "github.com/pinecone-io/go-pinecone/internal/gen/db_data/rest" "github.com/pinecone-io/go-pinecone/internal/useragent" "google.golang.org/grpc" "google.golang.org/grpc/credentials" @@ -16,7 +20,8 @@ import ( "google.golang.org/grpc/metadata" ) -// IndexConnection holds the parameters for a Pinecone IndexConnection object. +// [IndexConnection] holds the parameters for a Pinecone [IndexConnection] object. 
You can +// instantiate an [IndexConnection] by calling the [Client.Index] method with a [NewIndexConnParams] object. // // Fields: // - Namespace: The namespace where index operations will be performed. @@ -26,7 +31,8 @@ import ( type IndexConnection struct { Namespace string additionalMetadata map[string]string - dataClient *data.VectorServiceClient + restClient *db_data_rest.Client + grpcClient *db_data_grpc.VectorServiceClient grpcConn *grpc.ClientConn } @@ -35,6 +41,7 @@ type newIndexParameters struct { namespace string sourceTag string additionalMetadata map[string]string + dbDataClient *db_data_rest.Client } func newIndexConnection(in newIndexParameters, dialOpts ...grpc.DialOption) (*IndexConnection, error) { @@ -65,18 +72,19 @@ func newIndexConnection(in newIndexParameters, dialOpts ...grpc.DialOption) (*In return nil, err } - dataClient := data.NewVectorServiceClient(conn) + dataClient := db_data_grpc.NewVectorServiceClient(conn) idx := IndexConnection{ Namespace: in.namespace, - dataClient: &dataClient, + restClient: in.dbDataClient, + grpcClient: &dataClient, grpcConn: conn, additionalMetadata: in.additionalMetadata, } return &idx, nil } -// Close closes the grpc.ClientConn to a Pinecone index. +// [IndexConnection.Close] closes the grpc.ClientConn to a Pinecone [Index]. // // Returns an error if the connection cannot be closed, otherwise returns nil. // @@ -117,7 +125,7 @@ func (idx *IndexConnection) Close() error { return err } -// UpsertVectors upserts vectors into a Pinecone index. +// [IndexConnection.UpsertVectors] upserts vectors into a Pinecone [Index]. 
// // Parameters: // - ctx: A context.Context object controls the request's lifetime, @@ -185,25 +193,24 @@ func (idx *IndexConnection) Close() error { // log.Fatalf("Successfully upserted %d vector(s)!\n", count) // } func (idx *IndexConnection) UpsertVectors(ctx context.Context, in []*Vector) (uint32, error) { - vectors := make([]*data.Vector, len(in)) + vectors := make([]*db_data_grpc.Vector, len(in)) for i, v := range in { vectors[i] = vecToGrpc(v) } - req := &data.UpsertRequest{ + req := &db_data_grpc.UpsertRequest{ Vectors: vectors, Namespace: idx.Namespace, } - res, err := (*idx.dataClient).Upsert(idx.akCtx(ctx), req) + res, err := (*idx.grpcClient).Upsert(idx.akCtx(ctx), req) if err != nil { return 0, err } return res.UpsertedCount, nil } -// FetchVectorsResponse holds the parameters for the FetchVectorsResponse object, -// which is returned by the FetchVectors method. +// [FetchVectorsResponse] is returned by the [IndexConnection.FetchVectors] method. // // Fields: // - Vectors: The vectors fetched. @@ -215,7 +222,7 @@ type FetchVectorsResponse struct { Namespace string `json:"namespace"` } -// FetchVectors fetches vectors by ID from a Pinecone index. +// [IndexConnection.FetchVectors] fetches vectors by ID from a Pinecone [Index]. 
// // Parameters: // - ctx: A context.Context object controls the request's lifetime, @@ -263,12 +270,12 @@ type FetchVectorsResponse struct { // fmt.Println("No vectors found") // } func (idx *IndexConnection) FetchVectors(ctx context.Context, ids []string) (*FetchVectorsResponse, error) { - req := &data.FetchRequest{ + req := &db_data_grpc.FetchRequest{ Ids: ids, Namespace: idx.Namespace, } - res, err := (*idx.dataClient).Fetch(idx.akCtx(ctx), req) + res, err := (*idx.grpcClient).Fetch(idx.akCtx(ctx), req) if err != nil { return nil, err } @@ -285,8 +292,7 @@ func (idx *IndexConnection) FetchVectors(ctx context.Context, ids []string) (*Fe }, nil } -// ListVectorsRequest holds the parameters for the ListVectorsRequest object, -// which is passed into the ListVectors method. +// [ListVectorsRequest] holds the parameters passed into the [IndexConnection.ListVectors] method. // // Fields: // - Prefix: (Optional) The prefix by which to filter. If unspecified, @@ -299,8 +305,7 @@ type ListVectorsRequest struct { PaginationToken *string } -// ListVectorsResponse holds the parameters for the ListVectorsResponse object, -// which is returned by the ListVectors method. +// [ListVectorsResponse] is returned by the [IndexConnection.ListVectors] method. // // Fields: // - VectorIds: The unique IDs of the returned vectors. @@ -314,17 +319,17 @@ type ListVectorsResponse struct { Namespace string `json:"namespace"` } -// ListVectors lists vectors in a Pinecone index. You can filter vectors by prefix, +// [IndexConnection.ListVectors] lists vectors in a Pinecone index. You can filter vectors by prefix, // limit the number of vectors returned, and paginate through results. // // Note: ListVectors is only available for Serverless indexes. // -// Returns a pointer to a ListVectorsResponse object or an error if the request fails. +// Returns a pointer to a [ListVectorsResponse] object or an error if the request fails. 
// // Parameters: // - ctx: A context.Context object controls the request's lifetime, // allowing for the request to be canceled or to timeout according to the context's deadline. -// - in: A ListVectorsRequest object with the parameters for the request. +// - in: A [ListVectorsRequest] object with the parameters for the request. // // Example: // @@ -371,13 +376,13 @@ type ListVectorsResponse struct { // fmt.Printf("Found %d vector(s)\n", len(res.VectorIds)) // } func (idx *IndexConnection) ListVectors(ctx context.Context, in *ListVectorsRequest) (*ListVectorsResponse, error) { - req := &data.ListRequest{ + req := &db_data_grpc.ListRequest{ Prefix: in.Prefix, Limit: in.Limit, PaginationToken: in.PaginationToken, Namespace: idx.Namespace, } - res, err := (*idx.dataClient).List(idx.akCtx(ctx), req) + res, err := (*idx.grpcClient).List(idx.akCtx(ctx), req) if err != nil { return nil, err } @@ -390,13 +395,12 @@ func (idx *IndexConnection) ListVectors(ctx context.Context, in *ListVectorsRequ return &ListVectorsResponse{ VectorIds: vectorIds, Usage: toUsage(res.Usage), - NextPaginationToken: toPaginationToken(res.Pagination), + NextPaginationToken: toPaginationTokenGrpc(res.Pagination), Namespace: idx.Namespace, }, nil } -// QueryByVectorValuesRequest holds the parameters for the QueryByVectorValuesRequest object, -// which is passed into the QueryByVectorValues method. +// [QueryByVectorValuesRequest] holds the parameters for the [IndexConnection.QueryByVectorValues] method. // // Fields: // - Vector: (Required) The query vector used to find similar vectors. @@ -414,8 +418,7 @@ type QueryByVectorValuesRequest struct { SparseValues *SparseValues } -// QueryVectorsResponse holds the parameters for the QueryVectorsResponse object, -// which is returned by the QueryByVectorValues method. +// [QueryVectorsResponse] is returned by the [IndexConnection.QueryByVectorValues] method. // // Fields: // - Matches: The vectors that are most similar to the query vector. 
@@ -427,9 +430,9 @@ type QueryVectorsResponse struct { Namespace string `json:"namespace"` } -// QueryByVectorValues queries a Pinecone index for vectors that are most similar to a provided query vector. +// [IndexConnection.QueryByVectorValues] queries a Pinecone [Index] for vectors that are most similar to a provided query vector. // -// Returns a pointer to a QueryVectorsResponse object or an error if the request fails. +// Returns a pointer to a [QueryVectorsResponse] object or an error if the request fails. // // Note: To issue a hybrid query with both dense and sparse values, // your index's similarity metric must be dot-product. @@ -437,7 +440,7 @@ type QueryVectorsResponse struct { // Parameters: // - ctx: A context.Context object controls the request's lifetime, // allowing for the request to be canceled or to timeout according to the context's deadline. -// - in: A QueryByVectorValuesRequest object with the parameters for the request. +// - in: A [QueryByVectorValuesRequest] object with the parameters for the request. // // Example: // @@ -501,7 +504,7 @@ type QueryVectorsResponse struct { // } // } func (idx *IndexConnection) QueryByVectorValues(ctx context.Context, in *QueryByVectorValuesRequest) (*QueryVectorsResponse, error) { - req := &data.QueryRequest{ + req := &db_data_grpc.QueryRequest{ Namespace: idx.Namespace, TopK: in.TopK, Filter: in.MetadataFilter, @@ -514,8 +517,7 @@ func (idx *IndexConnection) QueryByVectorValues(ctx context.Context, in *QueryBy return idx.query(ctx, req) } -// QueryByVectorIdRequest holds the parameters for the QueryByVectorIdRequest object, -// which is passed into the QueryByVectorId method. +// [QueryByVectorIdRequest] holds the parameters for the [IndexConnection.QueryByVectorId] method. // // Fields: // - VectorId: (Required) The unique ID of the vector used to find similar vectors. 
@@ -527,18 +529,18 @@ func (idx *IndexConnection) QueryByVectorValues(ctx context.Context, in *QueryBy type QueryByVectorIdRequest struct { VectorId string TopK uint32 - metadataFilter *MetadataFilter + MetadataFilter *MetadataFilter IncludeValues bool IncludeMetadata bool SparseValues *SparseValues } -// QueryByVectorId uses a vector ID to query a Pinecone index and retrieve vectors that are most similar to the +// [IndexConnection.QueryByVectorId] uses a vector ID to query a Pinecone [Index] and retrieve vectors that are most similar to the // provided ID's underlying vector. // -// Returns a pointer to a QueryVectorsResponse object or an error if the request fails. +// Returns a pointer to a [QueryVectorsResponse] object or an error if the request fails. // -// Note: QueryByVectorId executes a nearest neighbors search, meaning that unless TopK=1 in the QueryByVectorIdRequest +// Note: QueryByVectorId executes a nearest neighbors search, meaning that unless TopK=1 in the [QueryByVectorIdRequest] // object, it will return 2+ vectors. The vector with a score of 1.0 is the vector with the same ID as the query vector. // // Parameters: @@ -591,11 +593,11 @@ type QueryByVectorIdRequest struct { // } // } func (idx *IndexConnection) QueryByVectorId(ctx context.Context, in *QueryByVectorIdRequest) (*QueryVectorsResponse, error) { - req := &data.QueryRequest{ + req := &db_data_grpc.QueryRequest{ Id: in.VectorId, Namespace: idx.Namespace, TopK: in.TopK, - Filter: in.metadataFilter, + Filter: in.MetadataFilter, IncludeValues: in.IncludeValues, IncludeMetadata: in.IncludeMetadata, SparseVector: sparseValToGrpc(in.SparseValues), @@ -604,13 +606,12 @@ func (idx *IndexConnection) QueryByVectorId(ctx context.Context, in *QueryByVect return idx.query(ctx, req) } -// DeleteVectorsById deletes vectors by ID from a Pinecone index. +// [IndexConnection.DeleteVectorsById] deletes vectors by ID from a Pinecone [Index]. 
// -// Returns an error if the request fails, -// otherwise returns nil. This method will also return nil if the passed vector ID does not exist in the index or -// namespace. +// Returns an error if the request fails, otherwise returns nil. This method will also return +// nil if the passed vector ID does not exist in the index or namespace. // -// Note: You must instantiate an Index connection with a Namespace in NewIndexConnParams in order to delete vectors +// Note: You must create an [IndexConnection] with a Namespace in [NewIndexConnParams] in order to delete vectors // in a namespace other than the default: "". // // Parameters: @@ -652,7 +653,7 @@ func (idx *IndexConnection) QueryByVectorId(ctx context.Context, in *QueryByVect // log.Fatalf("Failed to delete vector with ID: %s. Error: %s\n", vectorId, err) // } func (idx *IndexConnection) DeleteVectorsById(ctx context.Context, ids []string) error { - req := data.DeleteRequest{ + req := db_data_grpc.DeleteRequest{ Ids: ids, Namespace: idx.Namespace, } @@ -660,13 +661,13 @@ func (idx *IndexConnection) DeleteVectorsById(ctx context.Context, ids []string) return idx.delete(ctx, &req) } -// DeleteVectorsByFilter deletes vectors from a Pinecone index, given a filter. +// [IndexConnection.DeleteVectorsByFilter] deletes vectors from a Pinecone [Index], given a filter. // // Returns an error if the request fails, otherwise returns nil. // -// Note: DeleteVectorsByFilter is only available on pods-based indexes. -// Additionally, you must instantiate an IndexConnection using the Index method with a Namespace in NewIndexConnParams -// in order to delete vectors in a namespace other than the default. +// Note: [DeleteVectorsByFilter] is only available on pods-based indexes. +// Additionally, you must create an [IndexConnection] using the [Client.Index] method with a Namespace in [NewIndexConnParams] +// in order to delete vectors in a namespace other than the default: "". 
// // Parameters: // - ctx: A context.Context object controls the request's lifetime, @@ -716,7 +717,7 @@ func (idx *IndexConnection) DeleteVectorsById(ctx context.Context, ids []string) // log.Fatalf("Failed to delete vector(s) with filter: %+v. Error: %s\n", filter, err) // } func (idx *IndexConnection) DeleteVectorsByFilter(ctx context.Context, metadataFilter *MetadataFilter) error { - req := data.DeleteRequest{ + req := db_data_grpc.DeleteRequest{ Filter: metadataFilter, Namespace: idx.Namespace, } @@ -724,12 +725,12 @@ func (idx *IndexConnection) DeleteVectorsByFilter(ctx context.Context, metadataF return idx.delete(ctx, &req) } -// DeleteAllVectorsInNamespace deletes all vectors in a specific namespace. +// [IndexConnection.DeleteAllVectorsInNamespace] deletes all vectors in a specific namespace. // // Returns an error if the request fails, otherwise returns nil. // -// Note: You must instantiate an IndexConnection using the Index method with a Namespace in NewIndexConnParams -// in order to delete vectors in a namespace other than the default. +// Note: You must instantiate an [IndexConnection] using the [Client.Index] method with a Namespace in [NewIndexConnParams] +// in order to delete vectors in a namespace other than the default: "". // // Parameters: // - ctx: A context.Context object controls the request's lifetime, @@ -768,7 +769,7 @@ func (idx *IndexConnection) DeleteVectorsByFilter(ctx context.Context, metadataF // log.Fatalf("Failed to delete vectors in namespace: \"%s\". 
Error: %s", idxConnection.Namespace, err) // } func (idx *IndexConnection) DeleteAllVectorsInNamespace(ctx context.Context) error { - req := data.DeleteRequest{ + req := db_data_grpc.DeleteRequest{ Namespace: idx.Namespace, DeleteAll: true, } @@ -776,8 +777,7 @@ func (idx *IndexConnection) DeleteAllVectorsInNamespace(ctx context.Context) err return idx.delete(ctx, &req) } -// UpdateVectorRequest holds the parameters for the UpdateVectorRequest object, -// which is passed into the UpdateVector method. +// [UpdateVectorRequest] holds the parameters for the [IndexConnection.UpdateVector] method. // // Fields: // - Id: (Required) The unique ID of the vector to update. @@ -791,14 +791,14 @@ type UpdateVectorRequest struct { Metadata *Metadata } -// UpdateVector updates a vector in a Pinecone index by ID. +// [IndexConnection.UpdateVector] updates a vector in a Pinecone [Index] by ID. // // Returns an error if the request fails, returns nil otherwise. // // Parameters: // - ctx: A context.Context object controls the request's lifetime, // allowing for the request to be canceled or to timeout according to the context's deadline. -// - in: An UpdateVectorRequest object with the parameters for the request. +// - in: An [UpdateVectorRequest] object with the parameters for the request. 
// // Example: // @@ -842,7 +842,7 @@ func (idx *IndexConnection) UpdateVector(ctx context.Context, in *UpdateVectorRe return fmt.Errorf("a vector ID plus at least one of Values, SparseValues, or Metadata must be provided to update a vector") } - req := &data.UpdateRequest{ + req := &db_data_grpc.UpdateRequest{ Id: in.Id, Values: in.Values, SparseValues: sparseValToGrpc(in.SparseValues), @@ -850,18 +850,17 @@ func (idx *IndexConnection) UpdateVector(ctx context.Context, in *UpdateVectorRe Namespace: idx.Namespace, } - _, err := (*idx.dataClient).Update(idx.akCtx(ctx), req) + _, err := (*idx.grpcClient).Update(idx.akCtx(ctx), req) return err } -// DescribeIndexStatsResponse holds the parameters for the DescribeIndexStatsResponse object, -// which is returned by the DescribeIndexStats method. +// [DescribeIndexStatsResponse] is returned by the [IndexConnection.DescribeIndexStats] method. // // Fields: -// - Dimension: The dimension of the index. -// - IndexFullness: The fullness level of the index. Note: only available on pods-based indexes. -// - TotalVectorCount: The total number of vectors in the index. -// - Namespaces: The namespace(s) in the index. +// - Dimension: The dimension of the [Index]. +// - IndexFullness: The fullness level of the [Index]. Note: only available on pods-based indexes. +// - TotalVectorCount: The total number of vectors in the [Index]. +// - Namespaces: The namespace(s) in the [Index]. type DescribeIndexStatsResponse struct { Dimension uint32 `json:"dimension"` IndexFullness float32 `json:"index_fullness"` @@ -869,9 +868,9 @@ type DescribeIndexStatsResponse struct { Namespaces map[string]*NamespaceSummary `json:"namespaces,omitempty"` } -// DescribeIndexStats returns statistics about a Pinecone index. +// [IndexConnection.DescribeIndexStats] returns statistics about a Pinecone [Index]. // -// Returns a pointer to a DescribeIndexStatsResponse object or an error if the request fails. 
+// Returns a pointer to a [DescribeIndexStatsResponse] object or an error if the request fails. // // Parameters: // - ctx: A context.Context object controls the request's lifetime, @@ -915,9 +914,9 @@ func (idx *IndexConnection) DescribeIndexStats(ctx context.Context) (*DescribeIn return idx.DescribeIndexStatsFiltered(ctx, nil) } -// DescribeIndexStatsFiltered returns statistics about a Pinecone index, filtered by a given filter. +// [IndexConnection.DescribeIndexStatsFiltered] returns statistics about a Pinecone [Index], filtered by a given filter. // -// Returns a pointer to a DescribeIndexStatsResponse object or an error if the request fails. +// Returns a pointer to a [DescribeIndexStatsResponse] object or an error if the request fails. // // Note: DescribeIndexStatsFiltered is only available on pods-based indexes. // @@ -973,10 +972,10 @@ func (idx *IndexConnection) DescribeIndexStats(ctx context.Context) (*DescribeIn // } // } func (idx *IndexConnection) DescribeIndexStatsFiltered(ctx context.Context, metadataFilter *MetadataFilter) (*DescribeIndexStatsResponse, error) { - req := &data.DescribeIndexStatsRequest{ + req := &db_data_grpc.DescribeIndexStatsRequest{ Filter: metadataFilter, } - res, err := (*idx.dataClient).DescribeIndexStats(idx.akCtx(ctx), req) + res, err := (*idx.grpcClient).DescribeIndexStats(idx.akCtx(ctx), req) if err != nil { return nil, err } @@ -996,8 +995,309 @@ func (idx *IndexConnection) DescribeIndexStatsFiltered(ctx context.Context, meta }, nil } -func (idx *IndexConnection) query(ctx context.Context, req *data.QueryRequest) (*QueryVectorsResponse, error) { - res, err := (*idx.dataClient).Query(idx.akCtx(ctx), req) +// [StartImportResponse] holds the response parameters for the [IndexConnection.StartImport] method. +// +// Fields: +// - Id: The ID of the import process that was started. 
+type StartImportResponse struct { + Id string `json:"id,omitempty"` +} + +// [IndexConnection.StartImport] imports data from a storage provider into an [Index]. The uri parameter must start with the +// scheme of a supported storage provider (e.g. "s3://"). For buckets that are not publicly readable, you will also need to +// separately configure a [storage integration] and pass the integration id. +// +// Returns a pointer to a [StartImportResponse] object with the [Import] ID or an error if the request fails. +// +// Parameters: +// - ctx: A context.Context object controls the request's lifetime, +// allowing for the request to be canceled or to timeout according to the context's deadline. +// - uri: The URI of the data to import. The URI must start with the scheme of a supported storage provider. +// - integrationId: If your bucket requires authentication to access, you need to pass the id of your storage integration using this property. +// Pass nil if not required. +// - errorMode: If set to "continue", the import operation will continue even if some records fail to import. +// Pass "abort" to stop the import operation if any records fail. Will default to "continue" if nil is passed. +// +// Example: +// +// ctx := context.Background() +// +// clientParams := pinecone.NewClientParams{ +// ApiKey: "YOUR_API_KEY", +// SourceTag: "your_source_identifier", // optional +// } +// +// pc, err := pinecone.NewClient(clientParams) +// if err != nil { +// log.Fatalf("Failed to create Client: %v", err) +// } +// +// idx, err := pc.DescribeIndex(ctx, "your-index-name") +// if err != nil { +// log.Fatalf("Failed to describe index \"%s\". Error:%s", idx.Name, err) +// } +// +// idxConnection, err := pc.Index(pinecone.NewIndexConnParams{Host: idx.Host}) +// if err != nil { +// log.Fatalf("Failed to create IndexConnection for Host: %v. 
Error: %v", idx.Host, err) +// } +// +// uri := "s3://your-bucket/your-file.csv" +// errorMode := "abort" +// importRes, err := idxConnection.StartImport(ctx, uri, nil, &errorMode) +// if err != nil { +// log.Fatalf("Failed to start import: %v", err) +// } +// fmt.Printf("import starteed with ID: %s", importRes.Id) +// +// [storage integration]: https://docs.pinecone.io/guides/operations/integrations/manage-storage-integrations +func (idx *IndexConnection) StartImport(ctx context.Context, uri string, integrationId *string, errorMode *ImportErrorMode) (*StartImportResponse, error) { + if uri == "" { + return nil, fmt.Errorf("must specify a uri to start an import") + } + + req := db_data_rest.StartImportRequest{ + Uri: uri, + IntegrationId: integrationId, + } + + if errorMode != nil { + req.ErrorMode = &db_data_rest.ImportErrorMode{ + OnError: pointerOrNil(db_data_rest.ImportErrorModeOnError(*errorMode)), + } + } + + res, err := (*idx.restClient).StartBulkImport(idx.akCtx(ctx), req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + return nil, handleErrorResponseBody(res, "failed to start import: ") + } + + return decodeStartImportResponse(res.Body) +} + +// [IndexConnection.DescribeImport] retrieves information about a specific [Import] operation. +// +// Returns a pointer to an [Import] object representing the current state of the import process, or an error if the request fails. +// +// Parameters: +// - ctx: A context.Context object controls the request's lifetime, +// allowing for the request to be canceled or to timeout according to the context's deadline. +// - id: The id of the import operation. This is returned when you call [IndexConnection.StartImport], or can be retrieved +// through the [IndexConnection.ListImports] method. 
+// +// Example: +// +// ctx := context.Background() +// +// clientParams := pinecone.NewClientParams{ +// ApiKey: "YOUR_API_KEY", +// SourceTag: "your_source_identifier", // optional +// } +// +// pc, err := pinecone.NewClient(clientParams) +// if err != nil { +// log.Fatalf("Failed to create Client: %v", err) +// } +// +// idx, err := pc.DescribeIndex(ctx, "your-index-name") +// if err != nil { +// log.Fatalf("Failed to describe index \"%s\". Error:%s", idx.Name, err) +// } +// +// idxConnection, err := pc.Index(pinecone.NewIndexConnParams{Host: idx.Host}) +// if err != nil { +// log.Fatalf("Failed to create IndexConnection for Host: %v. Error: %v", idx.Host, err) +// } +// importDesc, err := idxConnection.DescribeImport(ctx, "your-import-id") +// if err != nil { +// log.Fatalf("Failed to describe import: %s - %v", "your-import-id", err) +// } +// fmt.Printf("Import ID: %s, Status: %s", importDesc.Id, importDesc.Status) +func (idx *IndexConnection) DescribeImport(ctx context.Context, id string) (*Import, error) { + res, err := (*idx.restClient).DescribeBulkImport(idx.akCtx(ctx), id) + if err != nil { + return nil, err + } + defer res.Body.Close() + + importModel, err := decodeImportModel(res.Body) + if err != nil { + return nil, err + } + return toImport(importModel), nil +} + +// [ListImportsRequest] holds the parameters for the [IndexConnection.ListImports] method. +// +// Fields: +// - Limit: The maximum number of imports to return. +// - PaginationToken: The token to retrieve the next page of imports, if available. +type ListImportsRequest struct { + Limit *int32 + PaginationToken *string +} + +// [ListImportsResponse] holds the result of listing [Import] objects. +// +// Fields: +// - Imports: The list of [Import] objects returned. +// - NextPaginationToken: The token for paginating through results, if more imports are available. 
+type ListImportsResponse struct { + Imports []*Import `json:"imports,omitempty"` + NextPaginationToken *string `json:"next_pagination_token,omitempty"` +} + +// [IndexConnection.ListImports] returns information about [Import] operations. It returns operations in a +// paginated form, with a pagination token to fetch the next page of results. +// +// Returns a pointer to a [ListImportsResponse] object or an error if the request fails. +// +// Parameters: +// - ctx: A context.Context object controls the request's lifetime, +// allowing for the request to be canceled or to timeout according to the context's deadline. +// - limit: (Optional) The maximum number of imports to return; paginationToken: (Optional) the token to retrieve the next page of imports, if available. +// +// Example: +// +// ctx := context.Background() +// +// clientParams := NewClientParams{ +// ApiKey: "YOUR_API_KEY", +// SourceTag: "your_source_identifier", // optional +// } +// +// pc, err := NewClient(clientParams) +// if err != nil { +// log.Fatalf("Failed to create Client: %v", err) +// } +// +// idx, err := pc.DescribeIndex(ctx, "your-index-name") +// if err != nil { +// log.Fatalf("Failed to describe index \"%s\". Error:%s", idx.Name, err) +// } +// +// idxConnection, err := pc.Index(NewIndexConnParams{Host: idx.Host}) +// if err != nil { +// log.Fatalf("Failed to create IndexConnection for Host: %v. 
Error: %v", idx.Host, err) +// } +// +// limit := int32(10) +// firstImportPage, err := idxConnection.ListImports(ctx, &limit, nil) +// if err != nil { +// log.Fatalf("Failed to list imports: %v", err) +// } +// fmt.Printf("First page of imports: %+v", firstImportPage.Imports) +// +// paginationToken := firstImportPage.NextPaginationToken +// nextImportPage, err := idxConnection.ListImports(ctx, &limit, paginationToken) +// if err != nil { +// log.Fatalf("Failed to list imports: %v", err) +// } +// fmt.Printf("Second page of imports: %+v", nextImportPage.Imports) +func (idx *IndexConnection) ListImports(ctx context.Context, limit *int32, paginationToken *string) (*ListImportsResponse, error) { + params := db_data_rest.ListBulkImportsParams{ + Limit: limit, + PaginationToken: paginationToken, + } + + res, err := (*idx.restClient).ListBulkImports(idx.akCtx(ctx), ¶ms) + if err != nil { + return nil, err + } + + listImportsResponse, err := decodeListImportsResponse(res.Body) + if err != nil { + return nil, err + } + + return listImportsResponse, nil +} + +// [IndexConnection.CancelImport] cancels an [Import] operation by id. +// +// Returns an error if the request fails. +// +// Parameters: +// - ctx: A context.Context object controls the request's lifetime, +// allowing for the request to be canceled or to timeout according to the context's deadline. +// - id: The id of the [Import] operation to cancel. +// +// Example: +// +// ctx := context.Background() +// +// clientParams := NewClientParams{ +// ApiKey: "YOUR_API_KEY", +// SourceTag: "your_source_identifier", // optional +// } +// +// pc, err := NewClient(clientParams) +// if err != nil { +// log.Fatalf("Failed to create Client: %v", err) +// } +// +// idx, err := pc.DescribeIndex(ctx, "your-index-name") +// if err != nil { +// log.Fatalf("Failed to describe index \"%s\". 
Error:%s", idx.Name, err) +// } +// +// idxConnection, err := pc.Index(NewIndexConnParams{Host: idx.Host}) +// if err != nil { +// log.Fatalf("Failed to create IndexConnection for Host: %v. Error: %v", idx.Host, err) +// } +// +// err = idxConnection.CancelImport(ctx, "your-import-id") +// if err != nil { +// log.Fatalf("Failed to cancel import: %s", "your-import-id") +// } +func (idx *IndexConnection) CancelImport(ctx context.Context, id string) error { + res, err := (*idx.restClient).CancelBulkImport(idx.akCtx(ctx), id) + if err != nil { + return err + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + return handleErrorResponseBody(res, "failed to cancel import: ") + } + + return nil +} + +func decodeListImportsResponse(body io.ReadCloser) (*ListImportsResponse, error) { + var listImportsResponse *db_data_rest.ListImportsResponse + if err := json.NewDecoder(body).Decode(&listImportsResponse); err != nil { + return nil, err + } + + return toListImportsResponse(listImportsResponse), nil +} + +func decodeImportModel(body io.ReadCloser) (*db_data_rest.ImportModel, error) { + var importModel db_data_rest.ImportModel + if err := json.NewDecoder(body).Decode(&importModel); err != nil { + return nil, err + } + + return &importModel, nil +} + +func decodeStartImportResponse(body io.ReadCloser) (*StartImportResponse, error) { + var importResponse *db_data_rest.StartImportResponse + if err := json.NewDecoder(body).Decode(&importResponse); err != nil { + return nil, err + } + + return toImportResponse(importResponse), nil +} + +func (idx *IndexConnection) query(ctx context.Context, req *db_data_grpc.QueryRequest) (*QueryVectorsResponse, error) { + res, err := (*idx.grpcClient).Query(idx.akCtx(ctx), req) if err != nil { return nil, err } @@ -1014,8 +1314,8 @@ func (idx *IndexConnection) query(ctx context.Context, req *data.QueryRequest) ( }, nil } -func (idx *IndexConnection) delete(ctx context.Context, req *data.DeleteRequest) error { - _, err := 
(*idx.dataClient).Delete(idx.akCtx(ctx), req) +func (idx *IndexConnection) delete(ctx context.Context, req *db_data_grpc.DeleteRequest) error { + _, err := (*idx.grpcClient).Delete(idx.akCtx(ctx), req) return err } @@ -1029,7 +1329,7 @@ func (idx *IndexConnection) akCtx(ctx context.Context) context.Context { return metadata.AppendToOutgoingContext(ctx, newMetadata...) } -func toVector(vector *data.Vector) *Vector { +func toVector(vector *db_data_grpc.Vector) *Vector { if vector == nil { return nil } @@ -1041,11 +1341,11 @@ func toVector(vector *data.Vector) *Vector { } } -func toScoredVector(sv *data.ScoredVector) *ScoredVector { +func toScoredVector(sv *db_data_grpc.ScoredVector) *ScoredVector { if sv == nil { return nil } - v := toVector(&data.Vector{ + v := toVector(&db_data_grpc.Vector{ Id: sv.Id, Values: sv.Values, SparseValues: sv.SparseValues, @@ -1057,7 +1357,7 @@ func toScoredVector(sv *data.ScoredVector) *ScoredVector { } } -func toSparseValues(sv *data.SparseValues) *SparseValues { +func toSparseValues(sv *db_data_grpc.SparseValues) *SparseValues { if sv == nil { return nil } @@ -1067,7 +1367,7 @@ func toSparseValues(sv *data.SparseValues) *SparseValues { } } -func toUsage(u *data.Usage) *Usage { +func toUsage(u *db_data_grpc.Usage) *Usage { if u == nil { return nil } @@ -1076,18 +1376,66 @@ func toUsage(u *data.Usage) *Usage { } } -func toPaginationToken(p *data.Pagination) *string { +func toPaginationTokenGrpc(p *db_data_grpc.Pagination) *string { if p == nil { return nil } return &p.Next } -func vecToGrpc(v *Vector) *data.Vector { +func toPaginationTokenRest(p *db_data_rest.Pagination) *string { + if p == nil { + return nil + } + return p.Next +} + +func toImport(importModel *db_data_rest.ImportModel) *Import { + if importModel == nil { + return nil + } + + return &Import{ + Id: *importModel.Id, + Uri: *importModel.Uri, + Status: ImportStatus(*importModel.Status), + CreatedAt: importModel.CreatedAt, + FinishedAt: importModel.FinishedAt, + Error: 
importModel.Error, + } +} + +func toImportResponse(importResponse *db_data_rest.StartImportResponse) *StartImportResponse { + if importResponse == nil { + return nil + } + + return &StartImportResponse{ + Id: derefOrDefault(importResponse.Id, ""), + } +} + +func toListImportsResponse(listImportsResponse *db_data_rest.ListImportsResponse) *ListImportsResponse { + if listImportsResponse == nil { + return nil + } + + imports := make([]*Import, len(*listImportsResponse.Data)) + for i, importModel := range *listImportsResponse.Data { + imports[i] = toImport(&importModel) + } + + return &ListImportsResponse{ + Imports: imports, + NextPaginationToken: toPaginationTokenRest(listImportsResponse.Pagination), + } +} + +func vecToGrpc(v *Vector) *db_data_grpc.Vector { if v == nil { return nil } - return &data.Vector{ + return &db_data_grpc.Vector{ Id: v.Id, Values: v.Values, Metadata: v.Metadata, @@ -1095,11 +1443,11 @@ func vecToGrpc(v *Vector) *data.Vector { } } -func sparseValToGrpc(sv *SparseValues) *data.SparseValues { +func sparseValToGrpc(sv *SparseValues) *db_data_grpc.SparseValues { if sv == nil { return nil } - return &data.SparseValues{ + return &db_data_grpc.SparseValues{ Indices: sv.Indices, Values: sv.Values, } diff --git a/pinecone/index_connection_test.go b/pinecone/index_connection_test.go index adc98ff..6d6e5b9 100644 --- a/pinecone/index_connection_test.go +++ b/pinecone/index_connection_test.go @@ -8,7 +8,7 @@ import ( "testing" "time" - "github.com/pinecone-io/go-pinecone/internal/gen/data" + db_data_grpc "github.com/pinecone-io/go-pinecone/internal/gen/db_data/grpc" "github.com/pinecone-io/go-pinecone/internal/utils" "google.golang.org/grpc" "google.golang.org/grpc/metadata" @@ -177,7 +177,7 @@ func (ts *IntegrationTests) TestMetadataAppliedToRequests() { require.True(ts.T(), ok, "Expected client to have an 'api-key' header") require.Equal(ts.T(), apiKey, apiKeyHeader, "Expected 'api-key' header to equal %s", apiKey) require.Equal(ts.T(), namespace, 
idxConn.Namespace, "Expected idxConn to have namespace '%s', but got '%s'", namespace, idxConn.Namespace) - require.NotNil(ts.T(), idxConn.dataClient, "Expected idxConn to have non-nil dataClient") + require.NotNil(ts.T(), idxConn.grpcClient, "Expected idxConn to have non-nil dataClient") require.NotNil(ts.T(), idxConn.grpcConn, "Expected idxConn to have non-nil grpcConn") // initiate request to trigger the MetadataInterceptor @@ -227,20 +227,22 @@ func (ts *IntegrationTests) TestUpdateVectorMetadata() { }) assert.NoError(ts.T(), err) - time.Sleep(5 * time.Second) + time.Sleep(10 * time.Second) vector, err := ts.idxConn.FetchVectors(ctx, []string{ts.vectorIds[0]}) if err != nil { ts.FailNow(fmt.Sprintf("Failed to fetch vector: %v", err)) } + assert.NotNil(ts.T(), vector.Vectors[ts.vectorIds[0]].Metadata, "Metadata is nil after update") + expectedGenre := expectedMetadataMap.Fields["genre"].GetStringValue() actualGenre := vector.Vectors[ts.vectorIds[0]].Metadata.Fields["genre"].GetStringValue() assert.Equal(ts.T(), expectedGenre, actualGenre, "Metadata does not match") } -func (ts *IntegrationTests) TestUpdateVectorSparseValues() error { +func (ts *IntegrationTests) TestUpdateVectorSparseValues() { ctx := context.Background() dims := int(ts.dimension) @@ -269,8 +271,45 @@ func (ts *IntegrationTests) TestUpdateVectorSparseValues() error { actualSparseValues := vector.Vectors[ts.vectorIds[0]].SparseValues.Values assert.ElementsMatch(ts.T(), expectedSparseValues.Values, actualSparseValues, "Sparse values do not match") +} + +func (ts *IntegrationTests) TestImportFlowHappyPath() { + if ts.indexType != "serverless" { + ts.T().Skip("Skipping import flow test for non-serverless index") + } - return nil + testImportUri := "s3://dev-bulk-import-datasets-pub/10-records-dim-10/" + ctx := context.Background() + errorMode := "continue" + + startRes, err := ts.idxConn.StartImport(ctx, testImportUri, nil, (*ImportErrorMode)(&errorMode)) + assert.NoError(ts.T(), err) + 
assert.NotNil(ts.T(), startRes) + + assert.NotNil(ts.T(), startRes.Id) + describeRes, err := ts.idxConn.DescribeImport(ctx, startRes.Id) + assert.NoError(ts.T(), err) + assert.NotNil(ts.T(), describeRes) + assert.Equal(ts.T(), startRes.Id, describeRes.Id) + + limit := int32(10) + listRes, err := ts.idxConn.ListImports(ctx, &limit, nil) + assert.NoError(ts.T(), err) + assert.NotNil(ts.T(), listRes) + + err = ts.idxConn.CancelImport(ctx, startRes.Id) + assert.NoError(ts.T(), err) +} + +func (ts *IntegrationTests) TestImportFlowNoUriError() { + if ts.indexType != "serverless" { + ts.T().Skip("Skipping import flow test for non-serverless index") + } + + ctx := context.Background() + _, err := ts.idxConn.StartImport(ctx, "", nil, nil) + assert.Error(ts.T(), err) + assert.Contains(ts.T(), err.Error(), "must specify a uri") } // Unit tests: @@ -299,7 +338,7 @@ func TestNewIndexConnection(t *testing.T) { require.True(t, ok, "Expected client to have an 'api-key' header") require.Equal(t, apiKey, apiKeyHeader, "Expected 'api-key' header to equal %s", apiKey) require.Empty(t, idxConn.Namespace, "Expected idxConn to have empty namespace, but got '%s'", idxConn.Namespace) - require.NotNil(t, idxConn.dataClient, "Expected idxConn to have non-nil dataClient") + require.NotNil(t, idxConn.grpcClient, "Expected idxConn to have non-nil dataClient") require.NotNil(t, idxConn.grpcConn, "Expected idxConn to have non-nil grpcConn") } @@ -320,7 +359,7 @@ func TestNewIndexConnectionNamespace(t *testing.T) { require.True(t, ok, "Expected client to have an 'api-key' header") require.Equal(t, apiKey, apiKeyHeader, "Expected 'api-key' header to equal %s", apiKey) require.Equal(t, namespace, idxConn.Namespace, "Expected idxConn to have namespace '%s', but got '%s'", namespace, idxConn.Namespace) - require.NotNil(t, idxConn.dataClient, "Expected idxConn to have non-nil dataClient") + require.NotNil(t, idxConn.grpcClient, "Expected idxConn to have non-nil dataClient") require.NotNil(t, 
idxConn.grpcConn, "Expected idxConn to have non-nil grpcConn") } @@ -518,7 +557,7 @@ func TestMarshalDescribeIndexStatsResponseUnit(t *testing.T) { func TestToVectorUnit(t *testing.T) { tests := []struct { name string - vector *data.Vector + vector *db_data_grpc.Vector expected *Vector }{ { @@ -528,7 +567,7 @@ func TestToVectorUnit(t *testing.T) { }, { name: "Pass dense vector", - vector: &data.Vector{ + vector: &db_data_grpc.Vector{ Id: "dense-1", Values: []float32{0.01, 0.02, 0.03}, }, @@ -539,10 +578,10 @@ func TestToVectorUnit(t *testing.T) { }, { name: "Pass sparse vector", - vector: &data.Vector{ + vector: &db_data_grpc.Vector{ Id: "sparse-1", Values: nil, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -558,10 +597,10 @@ func TestToVectorUnit(t *testing.T) { }, { name: "Pass hybrid vector", - vector: &data.Vector{ + vector: &db_data_grpc.Vector{ Id: "hybrid-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -578,10 +617,10 @@ func TestToVectorUnit(t *testing.T) { }, { name: "Pass hybrid vector with metadata", - vector: &data.Vector{ + vector: &db_data_grpc.Vector{ Id: "hybrid-metadata-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -616,7 +655,7 @@ func TestToVectorUnit(t *testing.T) { func TestToSparseValuesUnit(t *testing.T) { tests := []struct { name string - sparseValues *data.SparseValues + sparseValues *db_data_grpc.SparseValues expected *SparseValues }{ { @@ -626,7 +665,7 @@ func TestToSparseValuesUnit(t *testing.T) { }, { name: "Pass sparse values", - sparseValues: &data.SparseValues{ + sparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -647,7 
+686,7 @@ func TestToSparseValuesUnit(t *testing.T) { func TestToScoredVectorUnit(t *testing.T) { tests := []struct { name string - scoredVector *data.ScoredVector + scoredVector *db_data_grpc.ScoredVector expected *ScoredVector }{ { @@ -657,7 +696,7 @@ func TestToScoredVectorUnit(t *testing.T) { }, { name: "Pass scored dense vector", - scoredVector: &data.ScoredVector{ + scoredVector: &db_data_grpc.ScoredVector{ Id: "dense-1", Values: []float32{0.01, 0.01, 0.01}, Score: 0.1, @@ -672,9 +711,9 @@ func TestToScoredVectorUnit(t *testing.T) { }, { name: "Pass scored sparse vector", - scoredVector: &data.ScoredVector{ + scoredVector: &db_data_grpc.ScoredVector{ Id: "sparse-1", - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -693,10 +732,10 @@ func TestToScoredVectorUnit(t *testing.T) { }, { name: "Pass scored hybrid vector", - scoredVector: &data.ScoredVector{ + scoredVector: &db_data_grpc.ScoredVector{ Id: "hybrid-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -716,10 +755,10 @@ func TestToScoredVectorUnit(t *testing.T) { }, { name: "Pass scored hybrid vector with metadata", - scoredVector: &data.ScoredVector{ + scoredVector: &db_data_grpc.ScoredVector{ Id: "hybrid-metadata-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -760,7 +799,7 @@ func TestVecToGrpcUnit(t *testing.T) { tests := []struct { name string vector *Vector - expected *data.Vector + expected *db_data_grpc.Vector }{ { name: "Pass nil vector, expect nil to be returned", @@ -773,7 +812,7 @@ func TestVecToGrpcUnit(t *testing.T) { Id: "dense-1", Values: []float32{0.01, 0.02, 0.03}, }, - expected: &data.Vector{ + expected: &db_data_grpc.Vector{ Id: 
"dense-1", Values: []float32{0.01, 0.02, 0.03}, }, @@ -788,9 +827,9 @@ func TestVecToGrpcUnit(t *testing.T) { Values: []float32{0.01, 0.03}, }, }, - expected: &data.Vector{ + expected: &db_data_grpc.Vector{ Id: "sparse-1", - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -806,10 +845,10 @@ func TestVecToGrpcUnit(t *testing.T) { Values: []float32{0.01, 0.03}, }, }, - expected: &data.Vector{ + expected: &db_data_grpc.Vector{ Id: "hybrid-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -830,10 +869,10 @@ func TestVecToGrpcUnit(t *testing.T) { }, }, }, - expected: &data.Vector{ + expected: &db_data_grpc.Vector{ Id: "hybrid-metadata-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -859,7 +898,7 @@ func TestSparseValToGrpcUnit(t *testing.T) { name string sparseValues *SparseValues metadata *structpb.Struct - expected *data.SparseValues + expected *db_data_grpc.SparseValues }{ { name: "Pass nil sparse values, expect nil to be returned", @@ -872,7 +911,7 @@ func TestSparseValToGrpcUnit(t *testing.T) { Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, - expected: &data.SparseValues{ + expected: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -888,7 +927,7 @@ func TestSparseValToGrpcUnit(t *testing.T) { "genre": {Kind: &structpb.Value_StringValue{StringValue: "classical"}}, }, }, - expected: &data.SparseValues{ + expected: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -971,7 +1010,7 @@ func TestToUsageUnit(t *testing.T) { tests := []struct { name string - usage *data.Usage + usage *db_data_grpc.Usage expected *Usage }{ { @@ 
-981,7 +1020,7 @@ func TestToUsageUnit(t *testing.T) { }, { name: "Pass usage", - usage: &data.Usage{ + usage: &db_data_grpc.Usage{ ReadUnits: &u5, }, expected: &Usage{ @@ -1027,23 +1066,23 @@ func TestNormalizeHostUnit(t *testing.T) { } } -func TestToPaginationToken(t *testing.T) { +func TestToPaginationTokenGrpc(t *testing.T) { tokenForNilCase := "" tokenForPositiveCase := "next-token" tests := []struct { name string - token *data.Pagination + token *db_data_grpc.Pagination expected *string }{ { name: "Pass empty token, expect empty string to be returned", - token: &data.Pagination{}, + token: &db_data_grpc.Pagination{}, expected: &tokenForNilCase, }, { name: "Pass token", - token: &data.Pagination{ + token: &db_data_grpc.Pagination{ Next: "next-token", }, expected: &tokenForPositiveCase, @@ -1052,7 +1091,7 @@ func TestToPaginationToken(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - result := toPaginationToken(tt.token) + result := toPaginationTokenGrpc(tt.token) assert.Equal(t, tt.expected, result, "Expected result to be '%s', but got '%s'", tt.expected, result) }) } diff --git a/pinecone/models.go b/pinecone/models.go index 661adf5..aab1db8 100644 --- a/pinecone/models.go +++ b/pinecone/models.go @@ -1,10 +1,12 @@ package pinecone import ( + "time" + "google.golang.org/protobuf/types/known/structpb" ) -// IndexMetric is the [distance metric] to be used by similarity search against a Pinecone Index. +// [IndexMetric] is the [distance metric] to be used by similarity search against a Pinecone [Index]. // // [distance metric]: https://docs.pinecone.io/guides/indexes/understanding-indexes#distance-metrics type IndexMetric string @@ -15,7 +17,7 @@ const ( Euclidean IndexMetric = "euclidean" // Ideal for distance-based data (e.g. lat/long points) ) -// IndexStatusState is the state of a Pinecone Index. +// [IndexStatusState] is the state of a Pinecone [Index]. 
type IndexStatusState string const ( @@ -29,8 +31,8 @@ const ( Terminating IndexStatusState = "Terminating" ) -// DeletionProtection determines whether [deletion protection] is "enabled" or "disabled" for the index. -// When "enabled", the index cannot be deleted. Defaults to "disabled". +// [DeletionProtection] determines whether [deletion protection] is "enabled" or "disabled" for the [Index]. +// When "enabled", the [Index] cannot be deleted. Defaults to "disabled". // // [deletion protection]: http://docs.pinecone.io/guides/indexes/prevent-index-deletion type DeletionProtection string @@ -40,7 +42,7 @@ const ( DeletionProtectionDisabled DeletionProtection = "disabled" ) -// Cloud is the [cloud provider] to be used for a Pinecone serverless Index. +// [Cloud] is the [cloud provider] to be used for a Pinecone serverless [Index]. // // [cloud provider]: https://docs.pinecone.io/troubleshooting/available-cloud-regions type Cloud string @@ -51,19 +53,19 @@ const ( Gcp Cloud = "gcp" ) -// IndexStatus is the status of a Pinecone Index. +// [IndexStatus] is the status of a Pinecone [Index]. type IndexStatus struct { Ready bool `json:"ready"` State IndexStatusState `json:"state"` } -// IndexSpec is the infrastructure specification (pods vs serverless) of a Pinecone Index. +// [IndexSpec] is the infrastructure specification (pods vs serverless) of a Pinecone [Index]. type IndexSpec struct { Pod *PodSpec `json:"pod,omitempty"` Serverless *ServerlessSpec `json:"serverless,omitempty"` } -// Index is a Pinecone Index object. Can be either a pod-based or a serverless Index, depending on the IndexSpec. +// [Index] is a Pinecone [Index] object. Can be either a pod-based or a serverless [Index], depending on the [IndexSpec]. type Index struct { Name string `json:"name"` Dimension int32 `json:"dimension"` @@ -74,9 +76,9 @@ type Index struct { Status *IndexStatus `json:"status,omitempty"` } -// Collection is a Pinecone [Collection object]. Only available for pod-based Indexes. 
+// [Collection] is a Pinecone [collection entity]. Only available for pod-based Indexes. // -// [Collection object]: https://docs.pinecone.io/guides/indexes/understanding-collections +// [collection entity]: https://docs.pinecone.io/guides/indexes/understanding-collections type Collection struct { Name string `json:"name"` Size int64 `json:"size"` @@ -86,7 +88,7 @@ type Collection struct { Environment string `json:"environment"` } -// CollectionStatus is the status of a Pinecone Collection. +// [CollectionStatus] is the status of a Pinecone [Collection]. type CollectionStatus string const ( @@ -95,29 +97,29 @@ const ( CollectionStatusTerminating CollectionStatus = "Terminating" ) -// PodSpecMetadataConfig represents the metadata fields to be indexed when a Pinecone Index is created. +// [PodSpecMetadataConfig] represents the metadata fields to be indexed when a Pinecone [Index] is created. type PodSpecMetadataConfig struct { Indexed *[]string `json:"indexed,omitempty"` } -// PodSpec is the infrastructure specification of a pod-based Pinecone Index. Only available for pod-based Indexes. +// [PodSpec] is the infrastructure specification of a pod-based Pinecone [Index]. Only available for pod-based Indexes. type PodSpec struct { Environment string `json:"environment"` PodType string `json:"pod_type"` - PodCount int32 `json:"pod_count"` + PodCount int `json:"pod_count"` Replicas int32 `json:"replicas"` ShardCount int32 `json:"shard_count"` SourceCollection *string `json:"source_collection,omitempty"` MetadataConfig *PodSpecMetadataConfig `json:"metadata_config,omitempty"` } -// ServerlessSpec is the infrastructure specification of a serverless Pinecone Index. Only available for serverless Indexes. +// [ServerlessSpec] is the infrastructure specification of a serverless Pinecone [Index]. Only available for serverless Indexes. 
type ServerlessSpec struct { Cloud Cloud `json:"cloud"` Region string `json:"region"` } -// Vector is a [dense or sparse vector object] with optional metadata. +// [Vector] is a [dense or sparse vector object] with optional metadata. // // [dense or sparse vector object]: https://docs.pinecone.io/guides/get-started/key-concepts#dense-vector type Vector struct { @@ -127,14 +129,14 @@ type Vector struct { Metadata *Metadata `json:"metadata,omitempty"` } -// ScoredVector is a vector with an associated similarity score calculated according to the distance metric of the -// Index. +// [ScoredVector] is a vector with an associated similarity score calculated according to the distance metric of the +// [Index]. type ScoredVector struct { Vector *Vector `json:"vector,omitempty"` Score float32 `json:"score"` } -// SparseValues is a sparse vector objects, most commonly used for [hybrid search]. +// [SparseValues] is a sparse vector object, most commonly used for [hybrid search]. // // [hybrid search]: https://docs.pinecone.io/guides/data/understanding-hybrid-search#hybrid-search-in-pinecone type SparseValues struct { @@ -142,28 +144,94 @@ type SparseValues struct { Values []float32 `json:"values,omitempty"` } -// NamespaceSummary is a summary of stats for a Pinecone [namespace]. +// [NamespaceSummary] is a summary of stats for a Pinecone [namespace]. // // [namespace]: https://docs.pinecone.io/guides/indexes/use-namespaces type NamespaceSummary struct { VectorCount uint32 `json:"vector_count"` } -// Usage is the usage stats ([Read Units]) for a Pinecone Index. +// [Usage] is the usage stats ([Read Units]) for a Pinecone [Index]. // // [Read Units]: https://docs.pinecone.io/guides/organizations/manage-cost/understanding-cost#serverless-indexes type Usage struct { ReadUnits uint32 `json:"read_units"` } -// MetadataFilter represents the [metadata filters] attached to a Pinecone request. +// [RerankUsage] is the usage stats ([Rerank Units]) for a reranking request. 
+// +// [Rerank Units]: https://docs.pinecone.io/guides/organizations/manage-cost/understanding-cost#rerank +type RerankUsage struct { + RerankUnits *int `json:"rerank_units,omitempty"` +} + +// [MetadataFilter] represents the [metadata filters] attached to a Pinecone request. // These optional metadata filters are applied to query and deletion requests. // // [metadata filters]: https://docs.pinecone.io/guides/data/filter-with-metadata#querying-an-index-with-metadata-filters type MetadataFilter = structpb.Struct -// Metadata represents optional, +// [Metadata] represents optional, // additional information that can be [attached to, or updated for, a vector] in a Pinecone Index. // // [attached to, or updated for, a vector]: https://docs.pinecone.io/guides/data/filter-with-metadata#inserting-metadata-into-an-index type Metadata = structpb.Struct + +// [Embedding] represents the embedding of a single input which is returned after [generating embeddings]. +// +// [generating embeddings]: https://docs.pinecone.io/guides/inference/generate-embeddings#3-generate-embeddings +type Embedding struct { + Values *[]float32 `json:"values,omitempty"` +} + +// [ImportStatus] represents the status of an [Import] operation. +// +// Values: +// - Cancelled: The [Import] was canceled. +// - Completed: The [Import] completed successfully. +// - Failed: The [Import] encountered an error and did not complete successfully. +// - InProgress: The [Import] is currently in progress. +// - Pending: The [Import] is pending and has not yet started. +type ImportStatus string + +const ( + Cancelled ImportStatus = "Cancelled" + Completed ImportStatus = "Completed" + Failed ImportStatus = "Failed" + InProgress ImportStatus = "InProgress" + Pending ImportStatus = "Pending" +) + +// ImportErrorMode specifies how errors are handled during an [Import]. +// +// Values: +// - Abort: The [Import] process will abort upon encountering an error. 
+// - Continue: The [Import] process will continue, skipping over records that produce errors. +type ImportErrorMode string + +const ( + Abort ImportErrorMode = "abort" + Continue ImportErrorMode = "continue" +) + +// [Import] represents the details and status of an import process. +// +// Fields: +// - Id: The unique identifier of the [Import] process. +// - PercentComplete: The percentage of the [Import] process that has been completed. +// - RecordsImported: The total number of records successfully imported. +// - Status: The current status of the [Import] (e.g., "InProgress", "Completed", "Failed"). +// - Uri: The URI of the source data for the [Import]. +// - CreatedAt: The time at which the [Import] process was initiated. +// - FinishedAt: The time at which the [Import] process finished (either successfully or with an error). +// - Error: If the [Import] failed, contains the error message associated with the failure. +type Import struct { + Id string `json:"id,omitempty"` + PercentComplete float32 `json:"percent_complete,omitempty"` + RecordsImported int64 `json:"records_imported,omitempty"` + Status ImportStatus `json:"status,omitempty"` + Uri string `json:"uri,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + FinishedAt *time.Time `json:"finished_at,omitempty"` + Error *string `json:"error,omitempty"` +} diff --git a/pinecone/test_suite.go b/pinecone/test_suite.go index 6596cef..3d9fc0d 100644 --- a/pinecone/test_suite.go +++ b/pinecone/test_suite.go @@ -87,7 +87,20 @@ func (ts *IntegrationTests) TearDownSuite() { _, err = WaitUntilIndexReady(ts, ctx) require.NoError(ts.T(), err) err = ts.client.DeleteIndex(ctx, ts.idxName) - require.NoError(ts.T(), err) + + // If the index failed to delete, wait a bit and retry cleaning up + // Sometimes indexes are stuck upgrading, or have pending collections + retry := 4 + for err != nil && retry > 0 { + time.Sleep(5 * time.Second) + fmt.Printf("Failed to delete index \"%s\". Retrying... 
(%d/4)\n", ts.idxName, 5-retry) + err = ts.client.DeleteIndex(ctx, ts.idxName) + retry-- + } + + if err != nil { + fmt.Printf("Failed to delete index \"%s\" after 4 retries: %v\n", ts.idxName, err) + } fmt.Printf("\n %s setup suite torn down successfully\n", ts.indexType) }