From feb3a8589665a564599e46dfbe7759111349a991 Mon Sep 17 00:00:00 2001
From: Austin DeNoble
Date: Wed, 16 Oct 2024 15:33:00 -0400
Subject: [PATCH] Implement Bulk Import, Regenerate core for `2024-10` API (#79)

## Problem

We are releasing a new version of the API this month: `2024-10`. There are three primary new features included in this release:

- Import
- Inference
  - Embed
  - Rerank

This PR implements the operations to support Import. Sorry about the size, but you can basically ignore all of the generated code under `internal/gen` unless you're curious about the new structure of the generated core files. Follow the `codegen/build-clients.sh` script for those details.

## Solution

Since the import operations are technically part of the data plane but only supported via REST, they are represented in the OpenAPI spec and not in our protos file. Because of this, we need to change a few things in the `Client` and `IndexConnection` structs to support these operations, since the code `IndexConnection` has traditionally wrapped targeted gRPC-only db data operations. We now need to generate REST code for the data plane as well so we can interact with imports.

- Update the `codegen/build-clients.sh` script to handle building new modules for both `internal/gen/db_data/grpc` and `internal/gen/db_data/rest`.
- Update the `Client` struct and move `NewClientBaseParams` into a field that can be shared more easily when constructing the `IndexConnection`.
- Add `buildDataClientBaseOptions` to handle constructing the necessary REST client options for the underlying `dbDataClient`.
- Add an `ensureHostHasHttps` helper, as we need to make sure a scheme is present on the index `Host` that's passed, which was not necessary for gRPC.
- Update the `Index` method to call `buildDataClientBaseOptions` and pass the new client into `newIndexConnection`.
- Update `IndexConnection` to support both REST and gRPC interfaces under the hood (`restClient`, `grpcClient`).
- Update `newIndexConnection` to support attaching the new `restClient` to the `IndexConnection` struct.
- Update `IndexConnection` to support all import operations: `StartImport`, `ListImports`, `DescribeImport`, `CancelImport`.
- Add an end-to-end integration test validating the import flow against serverless indexes.
- Some nitpicky code cleanup and renaming of things around the new REST vs. gRPC paradigm, etc.

## Type of Change

- [ ] Bug fix (non-breaking change which fixes an issue)
- [X] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] This change requires a documentation update
- [ ] Infrastructure change (CI configs, etc)
- [ ] Non-code change (docs, etc)
- [ ] None of the above: (explain here)

## Test Plan

`just test` - make sure CI passes

To see examples of how to use the new methods, check the doc comments; a rough usage sketch is also included below.
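As a rough illustration (not part of the diff itself), here's a minimal sketch of how the import flow might look end to end against a serverless index. The parameter structs (`NewClientParams`, `NewIndexConnParams`), the extra arguments to `StartImport`/`ListImports`, and the response field names used below are assumptions for illustration only; the doc comments and the integration test are the source of truth for the real signatures.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/pinecone-io/go-pinecone/pinecone"
)

func main() {
	ctx := context.Background()

	// Construct a Client and target a serverless index by host.
	// Struct and field names here are assumptions for illustration.
	pc, err := pinecone.NewClient(pinecone.NewClientParams{ApiKey: "YOUR_API_KEY"})
	if err != nil {
		log.Fatalf("failed to create client: %v", err)
	}
	idx, err := pc.Index(pinecone.NewIndexConnParams{Host: "YOUR_INDEX_HOST"})
	if err != nil {
		log.Fatalf("failed to create index connection: %v", err)
	}

	// Kick off an import from object storage. The nil arguments stand in for
	// optional settings (e.g. storage integration, error mode); names assumed.
	started, err := idx.StartImport(ctx, "s3://example-bucket/imports/", nil, nil)
	if err != nil {
		log.Fatalf("failed to start import: %v", err)
	}
	fmt.Printf("started import: %s\n", started.Id)

	// List imports for the index (nil limit/pagination token for defaults).
	imports, err := idx.ListImports(ctx, nil, nil)
	if err != nil {
		log.Fatalf("failed to list imports: %v", err)
	}
	fmt.Printf("found %d imports\n", len(imports.Imports))

	// Describe a single import by ID, then cancel it.
	described, err := idx.DescribeImport(ctx, started.Id)
	if err != nil {
		log.Fatalf("failed to describe import: %v", err)
	}
	fmt.Printf("import %s status: %s\n", described.Id, described.Status)

	if err := idx.CancelImport(ctx, started.Id); err != nil {
		log.Fatalf("failed to cancel import: %v", err)
	}
}
```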
---
- To see the specific tasks where the Asana app for GitHub is being used, see below:
  - https://app.asana.com/0/0/1208325183834377
  - https://app.asana.com/0/0/1208541827330963
---
 .github/workflows/ci.yaml                    |    1 -
 codegen/apis                                 |    2 +-
 codegen/build-clients.sh                     |   17 +-
 internal/gen/control/control_plane.oas.go    | 1857 --------------
 .../gen/db_control/db_control_2024-10.oas.go |   37 +
 .../grpc}/db_data_2024-10.pb.go              |   11 +-
 .../grpc}/db_data_2024-10_grpc.pb.go         |    2 +-
 .../gen/db_data/rest/db_data_2024-10.oas.go  | 2280 +++++++++++++++++
 .../gen/inference/inference_2024-10.oas.go   |   70 +-
 pinecone/client.go                           |   60 +-
 pinecone/client_test.go                      |   16 +-
 pinecone/index_connection.go                 |  430 +++-
 pinecone/index_connection_test.go            |  124 +-
 pinecone/models.go                           |   54 +
 pinecone/test_suite.go                       |   15 +-
 15 files changed, 2966 insertions(+), 2010 deletions(-)
 delete mode 100644 internal/gen/control/control_plane.oas.go
 rename internal/gen/{data => db_data/grpc}/db_data_2024-10.pb.go (99%)
 rename internal/gen/{data => db_data/grpc}/db_data_2024-10_grpc.pb.go (99%)
 create mode 100644 internal/gen/db_data/rest/db_data_2024-10.oas.go

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 0771024..424739d 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -34,7 +34,6 @@ jobs:
         run: |
           go get ./pinecone
       - name: Run tests
-        continue-on-error: true
        run: go test -count=1 -v ./pinecone
        env:
          PINECONE_API_KEY: ${{ secrets.API_KEY }}
diff --git a/codegen/apis b/codegen/apis
index 3b7369b..3002f1e 160000
--- a/codegen/apis
+++ b/codegen/apis
@@ -1 +1 @@
-Subproject commit 3b7369b89469ddc6ef4c08d3f8df905302966624
+Subproject commit 3002f1e62b895d2e64fba53e346ad84eb4719934
diff --git a/codegen/build-clients.sh b/codegen/build-clients.sh
index 0c45f5a..be42e60 100755
--- a/codegen/build-clients.sh
+++ b/codegen/build-clients.sh
@@ -7,16 +7,19 @@ db_control_module="db_control"
 db_data_module="db_data"
 inference_module="inference"
 
-# generated output destination paths
+# generated grpc output destination paths
 # db_data_destination must align with the option go_package in the proto file:
 # https://github.com/pinecone-io/apis/blob/d1d005e75cc9fe9a5c486ef9218fe87b57765961/src/release/db/data/data.proto#L3
-db_data_destination="internal/gen/data"
+db_data_destination="internal/gen/${db_data_module}"
 db_control_destination="internal/gen/${db_control_module}"
 inference_destination="internal/gen/${inference_module}"
 
 # version file
 version_file="internal/gen/api_version.go"
 
-# generated oas files
+
+# generated oas file destination paths
+db_data_rest_destination="${db_data_destination}/rest"
+db_data_oas_file="${db_data_rest_destination}/${db_data_module}_${version}.oas.go"
 db_control_oas_file="${db_control_destination}/${db_control_module}_${version}.oas.go"
 inference_oas_file="${inference_destination}/${inference_module}_${version}.oas.go"
 
@@ -92,6 +95,9 @@ EOL
 update_apis_repo
 verify_spec_version $version
 
+# Clear internal/gen/* contents
+rm -rf internal/gen/*
+
 # Generate db_control oas client
 rm -rf "${db_control_destination}"
 mkdir -p "${db_control_destination}"
@@ -102,9 +108,12 @@ rm -rf "${inference_destination}"
 mkdir -p "${inference_destination}"
 generate_oas_client $inference_module $inference_oas_file
 
-# Generate db_data proto client
+# Generate db_data oas and proto clients
 rm -rf "${db_data_destination}"
 mkdir -p "${db_data_destination}"
+mkdir -p "${db_data_rest_destination}"
+
+generate_oas_client $db_data_module $db_data_oas_file
 generate_proto_client $db_data_module
 
 # Generate version
file diff --git a/internal/gen/control/control_plane.oas.go b/internal/gen/control/control_plane.oas.go deleted file mode 100644 index 5088e89..0000000 --- a/internal/gen/control/control_plane.oas.go +++ /dev/null @@ -1,1857 +0,0 @@ -// Package db_control provides primitives to interact with the openapi HTTP API. -// -// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.3.0 DO NOT EDIT. -package db_control - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "net/url" - "strings" - - "github.com/oapi-codegen/runtime" -) - -const ( - ApiKeyAuthScopes = "ApiKeyAuth.Scopes" -) - -// Defines values for CollectionModelStatus. -const ( - CollectionModelStatusInitializing CollectionModelStatus = "Initializing" - CollectionModelStatusReady CollectionModelStatus = "Ready" - CollectionModelStatusTerminating CollectionModelStatus = "Terminating" -) - -// Defines values for CreateIndexRequestMetric. -const ( - CreateIndexRequestMetricCosine CreateIndexRequestMetric = "cosine" - CreateIndexRequestMetricDotproduct CreateIndexRequestMetric = "dotproduct" - CreateIndexRequestMetricEuclidean CreateIndexRequestMetric = "euclidean" -) - -// Defines values for DeletionProtection. -const ( - Disabled DeletionProtection = "disabled" - Enabled DeletionProtection = "enabled" -) - -// Defines values for ErrorResponseErrorCode. -const ( - ABORTED ErrorResponseErrorCode = "ABORTED" - ALREADYEXISTS ErrorResponseErrorCode = "ALREADY_EXISTS" - DATALOSS ErrorResponseErrorCode = "DATA_LOSS" - DEADLINEEXCEEDED ErrorResponseErrorCode = "DEADLINE_EXCEEDED" - FAILEDPRECONDITION ErrorResponseErrorCode = "FAILED_PRECONDITION" - FORBIDDEN ErrorResponseErrorCode = "FORBIDDEN" - INTERNAL ErrorResponseErrorCode = "INTERNAL" - INVALIDARGUMENT ErrorResponseErrorCode = "INVALID_ARGUMENT" - NOTFOUND ErrorResponseErrorCode = "NOT_FOUND" - OK ErrorResponseErrorCode = "OK" - OUTOFRANGE ErrorResponseErrorCode = "OUT_OF_RANGE" - PERMISSIONDENIED ErrorResponseErrorCode = "PERMISSION_DENIED" - QUOTAEXCEEDED ErrorResponseErrorCode = "QUOTA_EXCEEDED" - RESOURCEEXHAUSTED ErrorResponseErrorCode = "RESOURCE_EXHAUSTED" - UNAUTHENTICATED ErrorResponseErrorCode = "UNAUTHENTICATED" - UNAVAILABLE ErrorResponseErrorCode = "UNAVAILABLE" - UNIMPLEMENTED ErrorResponseErrorCode = "UNIMPLEMENTED" - UNKNOWN ErrorResponseErrorCode = "UNKNOWN" - UNPROCESSABLEENTITY ErrorResponseErrorCode = "UNPROCESSABLE_ENTITY" -) - -// Defines values for IndexModelMetric. -const ( - IndexModelMetricCosine IndexModelMetric = "cosine" - IndexModelMetricDotproduct IndexModelMetric = "dotproduct" - IndexModelMetricEuclidean IndexModelMetric = "euclidean" -) - -// Defines values for IndexModelStatusState. -const ( - IndexModelStatusStateInitializationFailed IndexModelStatusState = "InitializationFailed" - IndexModelStatusStateInitializing IndexModelStatusState = "Initializing" - IndexModelStatusStateReady IndexModelStatusState = "Ready" - IndexModelStatusStateScalingDown IndexModelStatusState = "ScalingDown" - IndexModelStatusStateScalingDownPodSize IndexModelStatusState = "ScalingDownPodSize" - IndexModelStatusStateScalingUp IndexModelStatusState = "ScalingUp" - IndexModelStatusStateScalingUpPodSize IndexModelStatusState = "ScalingUpPodSize" - IndexModelStatusStateTerminating IndexModelStatusState = "Terminating" -) - -// Defines values for ServerlessSpecCloud. 
-const ( - Aws ServerlessSpecCloud = "aws" - Azure ServerlessSpecCloud = "azure" - Gcp ServerlessSpecCloud = "gcp" -) - -// CollectionList The list of collections that exist in the project. -type CollectionList struct { - Collections *[]CollectionModel `json:"collections,omitempty"` -} - -// CollectionModel The CollectionModel describes the configuration and status of a Pinecone collection. -type CollectionModel struct { - // Dimension The dimension of the vectors stored in each record held in the collection. - Dimension *int32 `json:"dimension,omitempty"` - - // Environment The environment where the collection is hosted. - Environment string `json:"environment"` - - // Name The name of the collection. - Name string `json:"name"` - - // Size The size of the collection in bytes. - Size *int64 `json:"size,omitempty"` - - // Status The status of the collection. - Status CollectionModelStatus `json:"status"` - - // VectorCount The number of records stored in the collection. - VectorCount *int32 `json:"vector_count,omitempty"` -} - -// CollectionModelStatus The status of the collection. -type CollectionModelStatus string - -// ConfigureIndexRequest Configuration used to scale an index. -type ConfigureIndexRequest struct { - // DeletionProtection Whether [deletion protection](http://docs.pinecone.io/guides/indexes/prevent-index-deletion) is enabled/disabled for the index. - DeletionProtection *DeletionProtection `json:"deletion_protection,omitempty"` - Spec *struct { - Pod struct { - // PodType The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`. - PodType *string `json:"pod_type,omitempty"` - - // Replicas The number of replicas. Replicas duplicate your index. They provide higher availability and throughput. Replicas can be scaled up or down as your needs change. - Replicas *int32 `json:"replicas,omitempty"` - } `json:"pod"` - } `json:"spec,omitempty"` -} - -// CreateCollectionRequest The configuration needed to create a Pinecone collection. -type CreateCollectionRequest struct { - // Name The name of the collection to be created. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. - Name string `json:"name"` - - // Source The name of the index to be used as the source for the collection. - Source string `json:"source"` -} - -// CreateIndexRequest The configuration needed to create a Pinecone index. -type CreateIndexRequest struct { - // DeletionProtection Whether [deletion protection](http://docs.pinecone.io/guides/indexes/prevent-index-deletion) is enabled/disabled for the index. - DeletionProtection *DeletionProtection `json:"deletion_protection,omitempty"` - - // Dimension The dimensions of the vectors to be inserted in the index. - Dimension int32 `json:"dimension"` - - // Metric The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. - Metric *CreateIndexRequestMetric `json:"metric,omitempty"` - - // Name The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. - Name string `json:"name"` - - // Spec The spec object defines how the index should be deployed. - // - // For serverless indexes, you define only the [cloud and region](http://docs.pinecone.io/guides/indexes/understanding-indexes#cloud-regions) where the index should be hosted. 
For pod-based indexes, you define the [environment](http://docs.pinecone.io/guides/indexes/understanding-indexes#pod-environments) where the index should be hosted, the [pod type and size](http://docs.pinecone.io/guides/indexes/understanding-indexes#pod-types) to use, and other index characteristics. - Spec IndexSpec `json:"spec"` -} - -// CreateIndexRequestMetric The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. -type CreateIndexRequestMetric string - -// DeletionProtection Whether [deletion protection](http://docs.pinecone.io/guides/indexes/prevent-index-deletion) is enabled/disabled for the index. -type DeletionProtection string - -// ErrorResponse The response shape used for all error responses. -type ErrorResponse struct { - // Error Detailed information about the error that occurred. - Error struct { - Code ErrorResponseErrorCode `json:"code"` - - // Details Additional information about the error. This field is not guaranteed to be present. - Details *map[string]interface{} `json:"details,omitempty"` - Message string `json:"message"` - } `json:"error"` - - // Status The HTTP status code of the error. - Status int `json:"status"` -} - -// ErrorResponseErrorCode defines model for ErrorResponse.Error.Code. -type ErrorResponseErrorCode string - -// IndexList The list of indexes that exist in the project. -type IndexList struct { - Indexes *[]IndexModel `json:"indexes,omitempty"` -} - -// IndexModel The IndexModel describes the configuration and status of a Pinecone index. -type IndexModel struct { - // DeletionProtection Whether [deletion protection](http://docs.pinecone.io/guides/indexes/prevent-index-deletion) is enabled/disabled for the index. - DeletionProtection *DeletionProtection `json:"deletion_protection,omitempty"` - - // Dimension The dimensions of the vectors to be inserted in the index. - Dimension int32 `json:"dimension"` - - // Host The URL address where the index is hosted. - Host string `json:"host"` - - // Metric The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. - Metric IndexModelMetric `json:"metric"` - - // Name The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. - Name string `json:"name"` - Spec struct { - // Pod Configuration needed to deploy a pod-based index. - Pod *PodSpec `json:"pod,omitempty"` - - // Serverless Configuration needed to deploy a serverless index. - Serverless *ServerlessSpec `json:"serverless,omitempty"` - } `json:"spec"` - Status struct { - Ready bool `json:"ready"` - State IndexModelStatusState `json:"state"` - } `json:"status"` -} - -// IndexModelMetric The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. -type IndexModelMetric string - -// IndexModelStatusState defines model for IndexModel.Status.State. -type IndexModelStatusState string - -// IndexSpec The spec object defines how the index should be deployed. -// -// For serverless indexes, you define only the [cloud and region](http://docs.pinecone.io/guides/indexes/understanding-indexes#cloud-regions) where the index should be hosted. 
For pod-based indexes, you define the [environment](http://docs.pinecone.io/guides/indexes/understanding-indexes#pod-environments) where the index should be hosted, the [pod type and size](http://docs.pinecone.io/guides/indexes/understanding-indexes#pod-types) to use, and other index characteristics. -type IndexSpec struct { - // Pod Configuration needed to deploy a pod-based index. - Pod *PodSpec `json:"pod,omitempty"` - - // Serverless Configuration needed to deploy a serverless index. - Serverless *ServerlessSpec `json:"serverless,omitempty"` - union json.RawMessage -} - -// IndexSpec0 defines model for . -type IndexSpec0 = interface{} - -// IndexSpec1 defines model for . -type IndexSpec1 = interface{} - -// PodSpec Configuration needed to deploy a pod-based index. -type PodSpec struct { - // Environment The environment where the index is hosted. - Environment string `json:"environment"` - - // MetadataConfig Configuration for the behavior of Pinecone's internal metadata index. By default, all metadata is indexed; when `metadata_config` is present, only specified metadata fields are indexed. These configurations are only valid for use with pod-based indexes. - MetadataConfig *struct { - // Indexed By default, all metadata is indexed; to change this behavior, use this property to specify an array of metadata fields that should be indexed. - Indexed *[]string `json:"indexed,omitempty"` - } `json:"metadata_config,omitempty"` - - // PodType The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`. - PodType string `json:"pod_type"` - - // Pods The number of pods to be used in the index. This should be equal to `shards` x `replicas`.' - Pods int `json:"pods"` - - // Replicas The number of replicas. Replicas duplicate your index. They provide higher availability and throughput. Replicas can be scaled up or down as your needs change. - Replicas int32 `json:"replicas"` - - // Shards The number of shards. Shards split your data across multiple pods so you can fit more data into an index. - Shards int32 `json:"shards"` - - // SourceCollection The name of the collection to be used as the source for the index. - SourceCollection *string `json:"source_collection,omitempty"` -} - -// ServerlessSpec Configuration needed to deploy a serverless index. -type ServerlessSpec struct { - // Cloud The public cloud where you would like your index hosted. - Cloud ServerlessSpecCloud `json:"cloud"` - - // Region The region where you would like your index to be created. - Region string `json:"region"` -} - -// ServerlessSpecCloud The public cloud where you would like your index hosted. -type ServerlessSpecCloud string - -// CreateCollectionJSONRequestBody defines body for CreateCollection for application/json ContentType. -type CreateCollectionJSONRequestBody = CreateCollectionRequest - -// CreateIndexJSONRequestBody defines body for CreateIndex for application/json ContentType. -type CreateIndexJSONRequestBody = CreateIndexRequest - -// ConfigureIndexJSONRequestBody defines body for ConfigureIndex for application/json ContentType. 
-type ConfigureIndexJSONRequestBody = ConfigureIndexRequest - -// AsIndexSpec0 returns the union data inside the IndexSpec as a IndexSpec0 -func (t IndexSpec) AsIndexSpec0() (IndexSpec0, error) { - var body IndexSpec0 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromIndexSpec0 overwrites any union data inside the IndexSpec as the provided IndexSpec0 -func (t *IndexSpec) FromIndexSpec0(v IndexSpec0) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeIndexSpec0 performs a merge with any union data inside the IndexSpec, using the provided IndexSpec0 -func (t *IndexSpec) MergeIndexSpec0(v IndexSpec0) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsIndexSpec1 returns the union data inside the IndexSpec as a IndexSpec1 -func (t IndexSpec) AsIndexSpec1() (IndexSpec1, error) { - var body IndexSpec1 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromIndexSpec1 overwrites any union data inside the IndexSpec as the provided IndexSpec1 -func (t *IndexSpec) FromIndexSpec1(v IndexSpec1) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeIndexSpec1 performs a merge with any union data inside the IndexSpec, using the provided IndexSpec1 -func (t *IndexSpec) MergeIndexSpec1(v IndexSpec1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t IndexSpec) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - if err != nil { - return nil, err - } - object := make(map[string]json.RawMessage) - if t.union != nil { - err = json.Unmarshal(b, &object) - if err != nil { - return nil, err - } - } - - if t.Pod != nil { - object["pod"], err = json.Marshal(t.Pod) - if err != nil { - return nil, fmt.Errorf("error marshaling 'pod': %w", err) - } - } - - if t.Serverless != nil { - object["serverless"], err = json.Marshal(t.Serverless) - if err != nil { - return nil, fmt.Errorf("error marshaling 'serverless': %w", err) - } - } - b, err = json.Marshal(object) - return b, err -} - -func (t *IndexSpec) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - if err != nil { - return err - } - object := make(map[string]json.RawMessage) - err = json.Unmarshal(b, &object) - if err != nil { - return err - } - - if raw, found := object["pod"]; found { - err = json.Unmarshal(raw, &t.Pod) - if err != nil { - return fmt.Errorf("error reading 'pod': %w", err) - } - } - - if raw, found := object["serverless"]; found { - err = json.Unmarshal(raw, &t.Serverless) - if err != nil { - return fmt.Errorf("error reading 'serverless': %w", err) - } - } - - return err -} - -// RequestEditorFn is the function signature for the RequestEditor callback function -type RequestEditorFn func(ctx context.Context, req *http.Request) error - -// Doer performs HTTP requests. -// -// The standard http.Client implements this interface. -type HttpRequestDoer interface { - Do(req *http.Request) (*http.Response, error) -} - -// Client which conforms to the OpenAPI3 specification for this service. -type Client struct { - // The endpoint of the server conforming to this interface, with scheme, - // https://api.deepmap.com for example. This can contain a path relative - // to the server, such as https://api.deepmap.com/dev-test, and all the - // paths in the swagger spec will be appended to the server. 
- Server string - - // Doer for performing requests, typically a *http.Client with any - // customized settings, such as certificate chains. - Client HttpRequestDoer - - // A list of callbacks for modifying requests which are generated before sending over - // the network. - RequestEditors []RequestEditorFn -} - -// ClientOption allows setting custom parameters during construction -type ClientOption func(*Client) error - -// Creates a new Client, with reasonable defaults -func NewClient(server string, opts ...ClientOption) (*Client, error) { - // create a client with sane default values - client := Client{ - Server: server, - } - // mutate client and add all optional params - for _, o := range opts { - if err := o(&client); err != nil { - return nil, err - } - } - // ensure the server URL always has a trailing slash - if !strings.HasSuffix(client.Server, "/") { - client.Server += "/" - } - // create httpClient, if not already present - if client.Client == nil { - client.Client = &http.Client{} - } - return &client, nil -} - -// WithHTTPClient allows overriding the default Doer, which is -// automatically created using http.Client. This is useful for tests. -func WithHTTPClient(doer HttpRequestDoer) ClientOption { - return func(c *Client) error { - c.Client = doer - return nil - } -} - -// WithRequestEditorFn allows setting up a callback function, which will be -// called right before sending the request. This can be used to mutate the request. -func WithRequestEditorFn(fn RequestEditorFn) ClientOption { - return func(c *Client) error { - c.RequestEditors = append(c.RequestEditors, fn) - return nil - } -} - -// The interface specification for the client above. -type ClientInterface interface { - // ListCollections request - ListCollections(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) - - // CreateCollectionWithBody request with any body - CreateCollectionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - - CreateCollection(ctx context.Context, body CreateCollectionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - - // DeleteCollection request - DeleteCollection(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*http.Response, error) - - // DescribeCollection request - DescribeCollection(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*http.Response, error) - - // ListIndexes request - ListIndexes(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) - - // CreateIndexWithBody request with any body - CreateIndexWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - - CreateIndex(ctx context.Context, body CreateIndexJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - - // DeleteIndex request - DeleteIndex(ctx context.Context, indexName string, reqEditors ...RequestEditorFn) (*http.Response, error) - - // DescribeIndex request - DescribeIndex(ctx context.Context, indexName string, reqEditors ...RequestEditorFn) (*http.Response, error) - - // ConfigureIndexWithBody request with any body - ConfigureIndexWithBody(ctx context.Context, indexName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - - ConfigureIndex(ctx context.Context, indexName string, body ConfigureIndexJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) -} - -func (c *Client) 
ListCollections(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewListCollectionsRequest(c.Server) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) CreateCollectionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateCollectionRequestWithBody(c.Server, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) CreateCollection(ctx context.Context, body CreateCollectionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateCollectionRequest(c.Server, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) DeleteCollection(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteCollectionRequest(c.Server, collectionName) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) DescribeCollection(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDescribeCollectionRequest(c.Server, collectionName) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) ListIndexes(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewListIndexesRequest(c.Server) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) CreateIndexWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateIndexRequestWithBody(c.Server, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) CreateIndex(ctx context.Context, body CreateIndexJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateIndexRequest(c.Server, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) DeleteIndex(ctx context.Context, indexName string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteIndexRequest(c.Server, indexName) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) DescribeIndex(ctx context.Context, indexName string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, 
err := NewDescribeIndexRequest(c.Server, indexName) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) ConfigureIndexWithBody(ctx context.Context, indexName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewConfigureIndexRequestWithBody(c.Server, indexName, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) ConfigureIndex(ctx context.Context, indexName string, body ConfigureIndexJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewConfigureIndexRequest(c.Server, indexName, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -// NewListCollectionsRequest generates requests for ListCollections -func NewListCollectionsRequest(server string) (*http.Request, error) { - var err error - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/collections") - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewCreateCollectionRequest calls the generic CreateCollection builder with application/json body -func NewCreateCollectionRequest(server string, body CreateCollectionJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewCreateCollectionRequestWithBody(server, "application/json", bodyReader) -} - -// NewCreateCollectionRequestWithBody generates requests for CreateCollection with any type of body -func NewCreateCollectionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { - var err error - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/collections") - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - return req, nil -} - -// NewDeleteCollectionRequest generates requests for DeleteCollection -func NewDeleteCollectionRequest(server string, collectionName string) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "collection_name", runtime.ParamLocationPath, collectionName) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/collections/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewDescribeCollectionRequest generates requests for DescribeCollection -func NewDescribeCollectionRequest(server string, collectionName string) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "collection_name", runtime.ParamLocationPath, collectionName) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/collections/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewListIndexesRequest generates requests for ListIndexes -func NewListIndexesRequest(server string) (*http.Request, error) { - var err error - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/indexes") - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewCreateIndexRequest calls the generic CreateIndex builder with application/json body -func NewCreateIndexRequest(server string, body CreateIndexJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewCreateIndexRequestWithBody(server, "application/json", bodyReader) -} - -// NewCreateIndexRequestWithBody generates requests for CreateIndex with any type of body -func NewCreateIndexRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { - var err error - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/indexes") - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - return req, nil -} - -// NewDeleteIndexRequest generates requests for DeleteIndex -func NewDeleteIndexRequest(server string, indexName string) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "index_name", runtime.ParamLocationPath, indexName) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/indexes/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewDescribeIndexRequest generates requests for DescribeIndex -func NewDescribeIndexRequest(server string, indexName string) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "index_name", runtime.ParamLocationPath, indexName) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/indexes/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewConfigureIndexRequest calls the generic ConfigureIndex builder with application/json body -func NewConfigureIndexRequest(server string, indexName string, body ConfigureIndexJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewConfigureIndexRequestWithBody(server, indexName, "application/json", bodyReader) -} - -// NewConfigureIndexRequestWithBody generates requests for ConfigureIndex with any type of body -func NewConfigureIndexRequestWithBody(server string, indexName string, contentType string, body io.Reader) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "index_name", runtime.ParamLocationPath, indexName) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/indexes/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("PATCH", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - return req, nil -} - -func (c *Client) applyEditors(ctx context.Context, req *http.Request, additionalEditors []RequestEditorFn) error { - for _, r := range c.RequestEditors { - if err := r(ctx, req); err != nil { - return err - } - } - for _, r := range additionalEditors { - if err := r(ctx, req); err != nil { - return err - } - } - return nil -} - -// ClientWithResponses builds on ClientInterface to offer response payloads -type ClientWithResponses struct { - ClientInterface -} - -// NewClientWithResponses creates a new ClientWithResponses, which wraps -// Client with return type handling -func NewClientWithResponses(server string, opts ...ClientOption) (*ClientWithResponses, error) { - client, err := NewClient(server, opts...) - if err != nil { - return nil, err - } - return &ClientWithResponses{client}, nil -} - -// WithBaseURL overrides the baseURL. 
-func WithBaseURL(baseURL string) ClientOption { - return func(c *Client) error { - newBaseURL, err := url.Parse(baseURL) - if err != nil { - return err - } - c.Server = newBaseURL.String() - return nil - } -} - -// ClientWithResponsesInterface is the interface specification for the client with responses above. -type ClientWithResponsesInterface interface { - // ListCollectionsWithResponse request - ListCollectionsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListCollectionsResponse, error) - - // CreateCollectionWithBodyWithResponse request with any body - CreateCollectionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateCollectionResponse, error) - - CreateCollectionWithResponse(ctx context.Context, body CreateCollectionJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateCollectionResponse, error) - - // DeleteCollectionWithResponse request - DeleteCollectionWithResponse(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*DeleteCollectionResponse, error) - - // DescribeCollectionWithResponse request - DescribeCollectionWithResponse(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*DescribeCollectionResponse, error) - - // ListIndexesWithResponse request - ListIndexesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListIndexesResponse, error) - - // CreateIndexWithBodyWithResponse request with any body - CreateIndexWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateIndexResponse, error) - - CreateIndexWithResponse(ctx context.Context, body CreateIndexJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateIndexResponse, error) - - // DeleteIndexWithResponse request - DeleteIndexWithResponse(ctx context.Context, indexName string, reqEditors ...RequestEditorFn) (*DeleteIndexResponse, error) - - // DescribeIndexWithResponse request - DescribeIndexWithResponse(ctx context.Context, indexName string, reqEditors ...RequestEditorFn) (*DescribeIndexResponse, error) - - // ConfigureIndexWithBodyWithResponse request with any body - ConfigureIndexWithBodyWithResponse(ctx context.Context, indexName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ConfigureIndexResponse, error) - - ConfigureIndexWithResponse(ctx context.Context, indexName string, body ConfigureIndexJSONRequestBody, reqEditors ...RequestEditorFn) (*ConfigureIndexResponse, error) -} - -type ListCollectionsResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *CollectionList - JSON401 *ErrorResponse - JSON500 *ErrorResponse -} - -// Status returns HTTPResponse.Status -func (r ListCollectionsResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r ListCollectionsResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type CreateCollectionResponse struct { - Body []byte - HTTPResponse *http.Response - JSON201 *CollectionModel - JSON400 *ErrorResponse - JSON401 *ErrorResponse - JSON403 *ErrorResponse - JSON409 *ErrorResponse - JSON422 *ErrorResponse - JSON500 *ErrorResponse -} - -// Status returns HTTPResponse.Status -func (r CreateCollectionResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns 
HTTPResponse.StatusCode -func (r CreateCollectionResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type DeleteCollectionResponse struct { - Body []byte - HTTPResponse *http.Response - JSON401 *ErrorResponse - JSON404 *ErrorResponse - JSON500 *ErrorResponse -} - -// Status returns HTTPResponse.Status -func (r DeleteCollectionResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteCollectionResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type DescribeCollectionResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *CollectionModel - JSON401 *ErrorResponse - JSON404 *ErrorResponse - JSON500 *ErrorResponse -} - -// Status returns HTTPResponse.Status -func (r DescribeCollectionResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r DescribeCollectionResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type ListIndexesResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *IndexList - JSON401 *ErrorResponse - JSON500 *ErrorResponse -} - -// Status returns HTTPResponse.Status -func (r ListIndexesResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r ListIndexesResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type CreateIndexResponse struct { - Body []byte - HTTPResponse *http.Response - JSON201 *IndexModel - JSON400 *ErrorResponse - JSON401 *ErrorResponse - JSON403 *ErrorResponse - JSON404 *ErrorResponse - JSON409 *ErrorResponse - JSON422 *ErrorResponse - JSON500 *ErrorResponse -} - -// Status returns HTTPResponse.Status -func (r CreateIndexResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r CreateIndexResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type DeleteIndexResponse struct { - Body []byte - HTTPResponse *http.Response - JSON401 *ErrorResponse - JSON404 *ErrorResponse - JSON412 *ErrorResponse - JSON500 *ErrorResponse -} - -// Status returns HTTPResponse.Status -func (r DeleteIndexResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteIndexResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type DescribeIndexResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *IndexModel - JSON401 *ErrorResponse - JSON404 *ErrorResponse - JSON500 *ErrorResponse -} - -// Status returns HTTPResponse.Status -func (r DescribeIndexResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r DescribeIndexResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type 
ConfigureIndexResponse struct { - Body []byte - HTTPResponse *http.Response - JSON202 *IndexModel - JSON400 *ErrorResponse - JSON401 *ErrorResponse - JSON403 *ErrorResponse - JSON404 *ErrorResponse - JSON422 *ErrorResponse - JSON500 *ErrorResponse -} - -// Status returns HTTPResponse.Status -func (r ConfigureIndexResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r ConfigureIndexResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -// ListCollectionsWithResponse request returning *ListCollectionsResponse -func (c *ClientWithResponses) ListCollectionsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListCollectionsResponse, error) { - rsp, err := c.ListCollections(ctx, reqEditors...) - if err != nil { - return nil, err - } - return ParseListCollectionsResponse(rsp) -} - -// CreateCollectionWithBodyWithResponse request with arbitrary body returning *CreateCollectionResponse -func (c *ClientWithResponses) CreateCollectionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateCollectionResponse, error) { - rsp, err := c.CreateCollectionWithBody(ctx, contentType, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseCreateCollectionResponse(rsp) -} - -func (c *ClientWithResponses) CreateCollectionWithResponse(ctx context.Context, body CreateCollectionJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateCollectionResponse, error) { - rsp, err := c.CreateCollection(ctx, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseCreateCollectionResponse(rsp) -} - -// DeleteCollectionWithResponse request returning *DeleteCollectionResponse -func (c *ClientWithResponses) DeleteCollectionWithResponse(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*DeleteCollectionResponse, error) { - rsp, err := c.DeleteCollection(ctx, collectionName, reqEditors...) - if err != nil { - return nil, err - } - return ParseDeleteCollectionResponse(rsp) -} - -// DescribeCollectionWithResponse request returning *DescribeCollectionResponse -func (c *ClientWithResponses) DescribeCollectionWithResponse(ctx context.Context, collectionName string, reqEditors ...RequestEditorFn) (*DescribeCollectionResponse, error) { - rsp, err := c.DescribeCollection(ctx, collectionName, reqEditors...) - if err != nil { - return nil, err - } - return ParseDescribeCollectionResponse(rsp) -} - -// ListIndexesWithResponse request returning *ListIndexesResponse -func (c *ClientWithResponses) ListIndexesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListIndexesResponse, error) { - rsp, err := c.ListIndexes(ctx, reqEditors...) - if err != nil { - return nil, err - } - return ParseListIndexesResponse(rsp) -} - -// CreateIndexWithBodyWithResponse request with arbitrary body returning *CreateIndexResponse -func (c *ClientWithResponses) CreateIndexWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateIndexResponse, error) { - rsp, err := c.CreateIndexWithBody(ctx, contentType, body, reqEditors...) 
- if err != nil { - return nil, err - } - return ParseCreateIndexResponse(rsp) -} - -func (c *ClientWithResponses) CreateIndexWithResponse(ctx context.Context, body CreateIndexJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateIndexResponse, error) { - rsp, err := c.CreateIndex(ctx, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseCreateIndexResponse(rsp) -} - -// DeleteIndexWithResponse request returning *DeleteIndexResponse -func (c *ClientWithResponses) DeleteIndexWithResponse(ctx context.Context, indexName string, reqEditors ...RequestEditorFn) (*DeleteIndexResponse, error) { - rsp, err := c.DeleteIndex(ctx, indexName, reqEditors...) - if err != nil { - return nil, err - } - return ParseDeleteIndexResponse(rsp) -} - -// DescribeIndexWithResponse request returning *DescribeIndexResponse -func (c *ClientWithResponses) DescribeIndexWithResponse(ctx context.Context, indexName string, reqEditors ...RequestEditorFn) (*DescribeIndexResponse, error) { - rsp, err := c.DescribeIndex(ctx, indexName, reqEditors...) - if err != nil { - return nil, err - } - return ParseDescribeIndexResponse(rsp) -} - -// ConfigureIndexWithBodyWithResponse request with arbitrary body returning *ConfigureIndexResponse -func (c *ClientWithResponses) ConfigureIndexWithBodyWithResponse(ctx context.Context, indexName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ConfigureIndexResponse, error) { - rsp, err := c.ConfigureIndexWithBody(ctx, indexName, contentType, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseConfigureIndexResponse(rsp) -} - -func (c *ClientWithResponses) ConfigureIndexWithResponse(ctx context.Context, indexName string, body ConfigureIndexJSONRequestBody, reqEditors ...RequestEditorFn) (*ConfigureIndexResponse, error) { - rsp, err := c.ConfigureIndex(ctx, indexName, body, reqEditors...) 
- if err != nil { - return nil, err - } - return ParseConfigureIndexResponse(rsp) -} - -// ParseListCollectionsResponse parses an HTTP response from a ListCollectionsWithResponse call -func ParseListCollectionsResponse(rsp *http.Response) (*ListCollectionsResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &ListCollectionsResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest CollectionList - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON500 = &dest - - } - - return response, nil -} - -// ParseCreateCollectionResponse parses an HTTP response from a CreateCollectionWithResponse call -func ParseCreateCollectionResponse(rsp *http.Response) (*CreateCollectionResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &CreateCollectionResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: - var dest CollectionModel - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON201 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON403 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON409 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 422: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON422 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON500 = &dest - - } - - return response, nil -} - -// ParseDeleteCollectionResponse parses an HTTP response from a DeleteCollectionWithResponse call -func ParseDeleteCollectionResponse(rsp *http.Response) (*DeleteCollectionResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - 
response := &DeleteCollectionResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON500 = &dest - - } - - return response, nil -} - -// ParseDescribeCollectionResponse parses an HTTP response from a DescribeCollectionWithResponse call -func ParseDescribeCollectionResponse(rsp *http.Response) (*DescribeCollectionResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &DescribeCollectionResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest CollectionModel - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON500 = &dest - - } - - return response, nil -} - -// ParseListIndexesResponse parses an HTTP response from a ListIndexesWithResponse call -func ParseListIndexesResponse(rsp *http.Response) (*ListIndexesResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &ListIndexesResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest IndexList - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON500 = &dest - - } - - return response, nil -} - -// ParseCreateIndexResponse parses an HTTP response from a CreateIndexWithResponse call -func ParseCreateIndexResponse(rsp *http.Response) (*CreateIndexResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - 
} - - response := &CreateIndexResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 201: - var dest IndexModel - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON201 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON403 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON409 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 422: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON422 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON500 = &dest - - } - - return response, nil -} - -// ParseDeleteIndexResponse parses an HTTP response from a DeleteIndexWithResponse call -func ParseDeleteIndexResponse(rsp *http.Response) (*DeleteIndexResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &DeleteIndexResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 412: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON412 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON500 = &dest - - } - - return response, nil -} - -// ParseDescribeIndexResponse parses an HTTP response from a DescribeIndexWithResponse call -func ParseDescribeIndexResponse(rsp *http.Response) (*DescribeIndexResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } 
- - response := &DescribeIndexResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest IndexModel - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON500 = &dest - - } - - return response, nil -} - -// ParseConfigureIndexResponse parses an HTTP response from a ConfigureIndexWithResponse call -func ParseConfigureIndexResponse(rsp *http.Response) (*ConfigureIndexResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &ConfigureIndexResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 202: - var dest IndexModel - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON202 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON403 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 422: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON422 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: - var dest ErrorResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON500 = &dest - - } - - return response, nil -} diff --git a/internal/gen/db_control/db_control_2024-10.oas.go b/internal/gen/db_control/db_control_2024-10.oas.go index 5088e89..9415ad2 100644 --- a/internal/gen/db_control/db_control_2024-10.oas.go +++ b/internal/gen/db_control/db_control_2024-10.oas.go @@ -53,6 +53,7 @@ const ( NOTFOUND ErrorResponseErrorCode = "NOT_FOUND" OK ErrorResponseErrorCode = "OK" OUTOFRANGE ErrorResponseErrorCode = "OUT_OF_RANGE" + PAYMENTREQUIRED ErrorResponseErrorCode = "PAYMENT_REQUIRED" PERMISSIONDENIED ErrorResponseErrorCode = 
"PERMISSION_DENIED" QUOTAEXCEEDED ErrorResponseErrorCode = "QUOTA_EXCEEDED" RESOURCEEXHAUSTED ErrorResponseErrorCode = "RESOURCE_EXHAUSTED" @@ -131,6 +132,9 @@ type ConfigureIndexRequest struct { Replicas *int32 `json:"replicas,omitempty"` } `json:"pod"` } `json:"spec,omitempty"` + + // Tags Custom user tags added to an index. Keys must be alphanumeric and 80 characters or less. Values must be 120 characters or less. + Tags *IndexTags `json:"tags,omitempty"` } // CreateCollectionRequest The configuration needed to create a Pinecone collection. @@ -160,6 +164,9 @@ type CreateIndexRequest struct { // // For serverless indexes, you define only the [cloud and region](http://docs.pinecone.io/guides/indexes/understanding-indexes#cloud-regions) where the index should be hosted. For pod-based indexes, you define the [environment](http://docs.pinecone.io/guides/indexes/understanding-indexes#pod-environments) where the index should be hosted, the [pod type and size](http://docs.pinecone.io/guides/indexes/understanding-indexes#pod-types) to use, and other index characteristics. Spec IndexSpec `json:"spec"` + + // Tags Custom user tags added to an index. Keys must be alphanumeric and 80 characters or less. Values must be 120 characters or less. + Tags *IndexTags `json:"tags,omitempty"` } // CreateIndexRequestMetric The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. @@ -218,6 +225,9 @@ type IndexModel struct { Ready bool `json:"ready"` State IndexModelStatusState `json:"state"` } `json:"status"` + + // Tags Custom user tags added to an index. Keys must be alphanumeric and 80 characters or less. Values must be 120 characters or less. + Tags *IndexTags `json:"tags,omitempty"` } // IndexModelMetric The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. @@ -244,6 +254,9 @@ type IndexSpec0 = interface{} // IndexSpec1 defines model for . type IndexSpec1 = interface{} +// IndexTags Custom user tags added to an index. Keys must be alphanumeric and 80 characters or less. Values must be 120 characters or less. +type IndexTags map[string]*string + // PodSpec Configuration needed to deploy a pod-based index. type PodSpec struct { // Environment The environment where the index is hosted. 
@@ -1077,6 +1090,7 @@ type CreateCollectionResponse struct { JSON201 *CollectionModel JSON400 *ErrorResponse JSON401 *ErrorResponse + JSON402 *ErrorResponse JSON403 *ErrorResponse JSON409 *ErrorResponse JSON422 *ErrorResponse @@ -1178,6 +1192,7 @@ type CreateIndexResponse struct { JSON201 *IndexModel JSON400 *ErrorResponse JSON401 *ErrorResponse + JSON402 *ErrorResponse JSON403 *ErrorResponse JSON404 *ErrorResponse JSON409 *ErrorResponse @@ -1257,6 +1272,7 @@ type ConfigureIndexResponse struct { JSON202 *IndexModel JSON400 *ErrorResponse JSON401 *ErrorResponse + JSON402 *ErrorResponse JSON403 *ErrorResponse JSON404 *ErrorResponse JSON422 *ErrorResponse @@ -1459,6 +1475,13 @@ func ParseCreateCollectionResponse(rsp *http.Response) (*CreateCollectionRespons } response.JSON401 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 402: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON402 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: var dest ErrorResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { @@ -1654,6 +1677,13 @@ func ParseCreateIndexResponse(rsp *http.Response) (*CreateIndexResponse, error) } response.JSON401 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 402: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON402 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: var dest ErrorResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { @@ -1823,6 +1853,13 @@ func ParseConfigureIndexResponse(rsp *http.Response) (*ConfigureIndexResponse, e } response.JSON401 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 402: + var dest ErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON402 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: var dest ErrorResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { diff --git a/internal/gen/data/db_data_2024-10.pb.go b/internal/gen/db_data/grpc/db_data_2024-10.pb.go similarity index 99% rename from internal/gen/data/db_data_2024-10.pb.go rename to internal/gen/db_data/grpc/db_data_2024-10.pb.go index 04960b8..b7bdd83 100644 --- a/internal/gen/data/db_data_2024-10.pb.go +++ b/internal/gen/db_data/grpc/db_data_2024-10.pb.go @@ -4,7 +4,7 @@ // protoc v5.27.1 // source: db_data_2024-10.proto -package data +package grpc import ( _ "google.golang.org/genproto/googleapis/api/annotations" @@ -1870,12 +1870,13 @@ var file_db_data_2024_10_proto_rawDesc = []byte{ 0x02, 0x33, 0x3a, 0x01, 0x2a, 0x5a, 0x17, 0x12, 0x15, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x73, 0x22, 0x15, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x5f, - 0x73, 0x74, 0x61, 0x74, 0x73, 0x42, 0x4b, 0x0a, 0x11, 0x69, 0x6f, 0x2e, 0x70, 0x69, 0x6e, 0x65, - 0x63, 0x6f, 0x6e, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x34, 0x67, 0x69, + 0x73, 0x74, 0x61, 0x74, 0x73, 0x42, 0x53, 0x0a, 0x11, 0x69, 0x6f, 0x2e, 0x70, 0x69, 0x6e, 0x65, + 0x63, 0x6f, 0x6e, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 
0x6d, 0x2f, 0x70, 0x69, 0x6e, 0x65, 0x63, 0x6f, 0x6e, 0x65, 0x2d, 0x69, 0x6f, 0x2f, 0x67, 0x6f, 0x2d, 0x70, 0x69, 0x6e, 0x65, 0x63, 0x6f, 0x6e, 0x65, - 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x64, 0x61, - 0x74, 0x61, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x64, 0x62, + 0x5f, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, } var ( diff --git a/internal/gen/data/db_data_2024-10_grpc.pb.go b/internal/gen/db_data/grpc/db_data_2024-10_grpc.pb.go similarity index 99% rename from internal/gen/data/db_data_2024-10_grpc.pb.go rename to internal/gen/db_data/grpc/db_data_2024-10_grpc.pb.go index 88de85e..c296696 100644 --- a/internal/gen/data/db_data_2024-10_grpc.pb.go +++ b/internal/gen/db_data/grpc/db_data_2024-10_grpc.pb.go @@ -4,7 +4,7 @@ // - protoc v5.27.1 // source: db_data_2024-10.proto -package data +package grpc import ( context "context" diff --git a/internal/gen/db_data/rest/db_data_2024-10.oas.go b/internal/gen/db_data/rest/db_data_2024-10.oas.go new file mode 100644 index 0000000..a684d33 --- /dev/null +++ b/internal/gen/db_data/rest/db_data_2024-10.oas.go @@ -0,0 +1,2280 @@ +// Package db_data provides primitives to interact with the openapi HTTP API. +// +// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.3.0 DO NOT EDIT. +package db_data + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "time" + + "github.com/oapi-codegen/runtime" +) + +const ( + ApiKeyAuthScopes = "ApiKeyAuth.Scopes" +) + +// Defines values for ImportErrorModeOnError. +const ( + Abort ImportErrorModeOnError = "abort" + Continue ImportErrorModeOnError = "continue" +) + +// Defines values for ImportModelStatus. +const ( + Cancelled ImportModelStatus = "Cancelled" + Completed ImportModelStatus = "Completed" + Failed ImportModelStatus = "Failed" + InProgress ImportModelStatus = "InProgress" + Pending ImportModelStatus = "Pending" +) + +// CancelImportResponse The response for the `cancel_import` operation. +type CancelImportResponse = map[string]interface{} + +// DeleteRequest The request for the `delete` operation. +type DeleteRequest struct { + // DeleteAll This indicates that all vectors in the index namespace should be deleted. + DeleteAll *bool `json:"deleteAll,omitempty"` + + // Filter If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Filter with metadata](https://docs.pinecone.io/guides/data/filter-with-metadata). + // Serverless indexes do not support delete by metadata. Instead, you can use the `list` operation to fetch the vector IDs based on their common ID prefix and then delete the records by ID. + Filter *map[string]interface{} `json:"filter,omitempty"` + + // Ids Vectors to delete. + Ids *[]string `json:"ids,omitempty"` + + // Namespace The namespace to delete vectors from, if applicable. + Namespace *string `json:"namespace,omitempty"` +} + +// DeleteResponse The response for the `delete` operation. +type DeleteResponse = map[string]interface{} + +// DescribeIndexStatsRequest The request for the `describe_index_stats` operation. +type DescribeIndexStatsRequest struct { + // Filter If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. 
See [Filter with metadata](https://docs.pinecone.io/guides/data/filter-with-metadata). + // + // Serverless indexes do not support filtering `describe_index_stats` by metadata. + Filter *map[string]interface{} `json:"filter,omitempty"` +} + +// FetchResponse The response for the `fetch` operation. +type FetchResponse struct { + // Namespace The namespace of the vectors. + Namespace *string `json:"namespace,omitempty"` + Usage *Usage `json:"usage,omitempty"` + Vectors *map[string]Vector `json:"vectors,omitempty"` +} + +// ImportErrorMode Indicates how to respond to errors during the import process. +type ImportErrorMode struct { + // OnError Indicates how to respond to errors during the import process. + OnError *ImportErrorModeOnError `json:"onError,omitempty"` +} + +// ImportErrorModeOnError Indicates how to respond to errors during the import process. +type ImportErrorModeOnError string + +// ImportModel The model for an import operation. +type ImportModel struct { + // CreatedAt The start time of the import operation. + CreatedAt *time.Time `json:"createdAt,omitempty"` + + // Error The error message if the import process failed. + Error *string `json:"error,omitempty"` + + // FinishedAt The end time of the import operation. + FinishedAt *time.Time `json:"finishedAt,omitempty"` + + // Id Unique identifier for the import operation. + Id *string `json:"id,omitempty"` + + // PercentComplete The progress made by the operation out of 100 + PercentComplete *float32 `json:"percentComplete,omitempty"` + + // RecordsImported The number of records successfully imported. + RecordsImported *int64 `json:"recordsImported,omitempty"` + + // Status The status of the operation. + Status *ImportModelStatus `json:"status,omitempty"` + + // Uri The URI from where the data is imported. + Uri *string `json:"uri,omitempty"` +} + +// ImportModelStatus The status of the operation. +type ImportModelStatus string + +// IndexDescription The response for the `describe_index_stats` operation. +type IndexDescription struct { + // Dimension The dimension of the indexed vectors. + Dimension *int64 `json:"dimension,omitempty"` + + // IndexFullness The fullness of the index, regardless of whether a metadata filter expression was passed. The granularity of this metric is 10%. + // + // Serverless indexes scale automatically as needed, so index fullness is relevant only for pod-based indexes. + // + // The index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://docs.pinecone.io/reference/api/control-plane/describe_index). + IndexFullness *float32 `json:"indexFullness,omitempty"` + + // Namespaces A mapping for each namespace in the index from the namespace name to a summary of its contents. If a metadata filter expression is present, the summary will reflect only vectors matching that expression. + Namespaces *map[string]NamespaceSummary `json:"namespaces,omitempty"` + + // TotalVectorCount The total number of vectors in the index, regardless of whether a metadata filter expression was passed + TotalVectorCount *int64 `json:"totalVectorCount,omitempty"` +} + +// ListImportsResponse The response for the `list_imports` operation. +type ListImportsResponse struct { + Data *[]ImportModel `json:"data,omitempty"` + Pagination *Pagination `json:"pagination,omitempty"` +} + +// ListItem defines model for ListItem. +type ListItem struct { + Id *string `json:"id,omitempty"` +} + +// ListResponse The response for the `list` operation. 
+type ListResponse struct { + // Namespace The namespace of the vectors. + Namespace *string `json:"namespace,omitempty"` + Pagination *Pagination `json:"pagination,omitempty"` + Usage *Usage `json:"usage,omitempty"` + Vectors *[]ListItem `json:"vectors,omitempty"` +} + +// NamespaceSummary A summary of the contents of a namespace. +type NamespaceSummary struct { + // VectorCount The number of vectors stored in this namespace. Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc. + VectorCount *int64 `json:"vectorCount,omitempty"` +} + +// Pagination defines model for Pagination. +type Pagination struct { + Next *string `json:"next,omitempty"` +} + +// QueryRequest The request for the `query` operation. +type QueryRequest struct { + // Filter The filter to apply. You can use vector metadata to limit your search. See [Filter with metadata](https://docs.pinecone.io/guides/data/filter-with-metadata). + Filter *map[string]interface{} `json:"filter,omitempty"` + + // Id The unique ID of the vector to be used as a query vector. Each `query` request can contain only one of the parameters `queries`, `vector`, or `id`. + Id *string `json:"id,omitempty"` + + // IncludeMetadata Indicates whether metadata is included in the response as well as the ids. + IncludeMetadata *bool `json:"includeMetadata,omitempty"` + + // IncludeValues Indicates whether vector values are included in the response. + IncludeValues *bool `json:"includeValues,omitempty"` + + // Namespace The namespace to query. + Namespace *string `json:"namespace,omitempty"` + + // Queries DEPRECATED. The query vectors. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`. + // Deprecated: + Queries *[]QueryVector `json:"queries,omitempty"` + + // SparseVector Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. + SparseVector *SparseValues `json:"sparseVector,omitempty"` + + // TopK The number of results to return for each query. + TopK int64 `json:"topK"` + + // Vector The query vector. This should be the same length as the dimension of the index being queried. Each `query` request can contain only one of the parameters `id` or `vector`. + Vector *[]float32 `json:"vector,omitempty"` +} + +// QueryResponse The response for the `query` operation. These are the matches found for a particular query vector. The matches are ordered from most similar to least similar. +type QueryResponse struct { + // Matches The matches for the vectors. + Matches *[]ScoredVector `json:"matches,omitempty"` + + // Namespace The namespace for the vectors. + Namespace *string `json:"namespace,omitempty"` + + // Results DEPRECATED. The results of each query. The order is the same as `QueryRequest.queries`. + // Deprecated: + Results *[]SingleQueryResults `json:"results,omitempty"` + Usage *Usage `json:"usage,omitempty"` +} + +// QueryVector A single query vector within a `QueryRequest`. +type QueryVector struct { + // Filter An override for the metadata filter to apply. This replaces the request-level filter. + Filter *map[string]interface{} `json:"filter,omitempty"` + + // Namespace An override the namespace to search. + Namespace *string `json:"namespace,omitempty"` + + // SparseValues Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. 
+ SparseValues *SparseValues `json:"sparseValues,omitempty"` + + // TopK An override for the number of results to return for this query vector. + TopK *int64 `json:"topK,omitempty"` + + // Values The query vector values. This should be the same length as the dimension of the index being queried. + Values []float32 `json:"values"` +} + +// ScoredVector defines model for ScoredVector. +type ScoredVector struct { + // Id This is the vector's unique id. + Id string `json:"id"` + + // Metadata This is the metadata, if it is requested. + Metadata *map[string]interface{} `json:"metadata,omitempty"` + + // Score This is a measure of similarity between this vector and the query vector. The higher the score, the more they are similar. + Score *float32 `json:"score,omitempty"` + + // SparseValues Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. + SparseValues *SparseValues `json:"sparseValues,omitempty"` + + // Values This is the vector data, if it is requested. + Values *[]float32 `json:"values,omitempty"` +} + +// SingleQueryResults defines model for SingleQueryResults. +type SingleQueryResults struct { + // Matches The matches for the vectors. + Matches *[]ScoredVector `json:"matches,omitempty"` + + // Namespace The namespace for the vectors. + Namespace *string `json:"namespace,omitempty"` +} + +// SparseValues Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. +type SparseValues struct { + // Indices The indices of the sparse data. + Indices []int64 `json:"indices"` + + // Values The corresponding values of the sparse data, which must be with the same length as the indices. + Values []float32 `json:"values"` +} + +// StartImportRequest The request for the `start_import` operation. +type StartImportRequest struct { + // ErrorMode Indicates how to respond to errors during the import process. + ErrorMode *ImportErrorMode `json:"errorMode,omitempty"` + + // IntegrationId The id of the storage integration that should be used to access the data. + IntegrationId *string `json:"integrationId,omitempty"` + + // Uri The URI prefix under which the data to import is available. All data within this prefix will be listed then imported into the target index. Currently only `s3://` URIs are supported. + Uri *string `json:"uri,omitempty"` +} + +// StartImportResponse The response for the `start_import` operation. +type StartImportResponse struct { + // Id Unique identifier for the import operations. + Id *string `json:"id,omitempty"` +} + +// UpdateRequest The request for the `update` operation. +type UpdateRequest struct { + // Id Vector's unique id. + Id string `json:"id"` + + // Namespace The namespace containing the vector to update. + Namespace *string `json:"namespace,omitempty"` + + // SetMetadata Metadata to set for the vector. + SetMetadata *map[string]interface{} `json:"setMetadata,omitempty"` + + // SparseValues Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. + SparseValues *SparseValues `json:"sparseValues,omitempty"` + + // Values Vector data. + Values *[]float32 `json:"values,omitempty"` +} + +// UpdateResponse The response for the `update` operation. +type UpdateResponse = map[string]interface{} + +// UpsertRequest The request for the `upsert` operation. +type UpsertRequest struct { + // Namespace The namespace where you upsert vectors. 
+ Namespace *string `json:"namespace,omitempty"` + + // Vectors An array containing the vectors to upsert. Recommended batch limit is 100 vectors. + Vectors []Vector `json:"vectors"` +} + +// UpsertResponse The response for the `upsert` operation. +type UpsertResponse struct { + // UpsertedCount The number of vectors upserted. + UpsertedCount *int64 `json:"upsertedCount,omitempty"` +} + +// Usage defines model for Usage. +type Usage struct { + // ReadUnits The number of read units consumed by this operation. + ReadUnits *int64 `json:"readUnits,omitempty"` +} + +// Vector defines model for Vector. +type Vector struct { + // Id This is the vector's unique id. + Id string `json:"id"` + + // Metadata This is the metadata included in the request. + Metadata *map[string]interface{} `json:"metadata,omitempty"` + + // SparseValues Vector sparse data. Represented as a list of indices and a list of corresponded values, which must be with the same length. + SparseValues *SparseValues `json:"sparseValues,omitempty"` + + // Values This is the vector data included in the request. + Values []float32 `json:"values"` +} + +// ProtobufAny defines model for protobufAny. +type ProtobufAny struct { + TypeUrl *string `json:"typeUrl,omitempty"` + Value *[]byte `json:"value,omitempty"` +} + +// RpcStatus defines model for rpcStatus. +type RpcStatus struct { + Code *int32 `json:"code,omitempty"` + Details *[]ProtobufAny `json:"details,omitempty"` + Message *string `json:"message,omitempty"` +} + +// ListBulkImportsParams defines parameters for ListBulkImports. +type ListBulkImportsParams struct { + // Limit Max number of operations to return per page. + Limit *int32 `form:"limit,omitempty" json:"limit,omitempty"` + + // PaginationToken Pagination token to continue a previous listing operation. + PaginationToken *string `form:"paginationToken,omitempty" json:"paginationToken,omitempty"` +} + +// FetchVectorsParams defines parameters for FetchVectors. +type FetchVectorsParams struct { + // Ids The vector IDs to fetch. Does not accept values containing spaces. + Ids []string `form:"ids" json:"ids"` + Namespace *string `form:"namespace,omitempty" json:"namespace,omitempty"` +} + +// ListVectorsParams defines parameters for ListVectors. +type ListVectorsParams struct { + // Prefix The vector IDs to fetch. Does not accept values containing spaces. + Prefix *string `form:"prefix,omitempty" json:"prefix,omitempty"` + + // Limit Max number of IDs to return per page. + Limit *int64 `form:"limit,omitempty" json:"limit,omitempty"` + + // PaginationToken Pagination token to continue a previous listing operation. + PaginationToken *string `form:"paginationToken,omitempty" json:"paginationToken,omitempty"` + Namespace *string `form:"namespace,omitempty" json:"namespace,omitempty"` +} + +// StartBulkImportJSONRequestBody defines body for StartBulkImport for application/json ContentType. +type StartBulkImportJSONRequestBody = StartImportRequest + +// DescribeIndexStatsJSONRequestBody defines body for DescribeIndexStats for application/json ContentType. +type DescribeIndexStatsJSONRequestBody = DescribeIndexStatsRequest + +// QueryVectorsJSONRequestBody defines body for QueryVectors for application/json ContentType. +type QueryVectorsJSONRequestBody = QueryRequest + +// DeleteVectorsJSONRequestBody defines body for DeleteVectors for application/json ContentType. +type DeleteVectorsJSONRequestBody = DeleteRequest + +// UpdateVectorJSONRequestBody defines body for UpdateVector for application/json ContentType. 
+type UpdateVectorJSONRequestBody = UpdateRequest + +// UpsertVectorsJSONRequestBody defines body for UpsertVectors for application/json ContentType. +type UpsertVectorsJSONRequestBody = UpsertRequest + +// RequestEditorFn is the function signature for the RequestEditor callback function +type RequestEditorFn func(ctx context.Context, req *http.Request) error + +// Doer performs HTTP requests. +// +// The standard http.Client implements this interface. +type HttpRequestDoer interface { + Do(req *http.Request) (*http.Response, error) +} + +// Client which conforms to the OpenAPI3 specification for this service. +type Client struct { + // The endpoint of the server conforming to this interface, with scheme, + // https://api.deepmap.com for example. This can contain a path relative + // to the server, such as https://api.deepmap.com/dev-test, and all the + // paths in the swagger spec will be appended to the server. + Server string + + // Doer for performing requests, typically a *http.Client with any + // customized settings, such as certificate chains. + Client HttpRequestDoer + + // A list of callbacks for modifying requests which are generated before sending over + // the network. + RequestEditors []RequestEditorFn +} + +// ClientOption allows setting custom parameters during construction +type ClientOption func(*Client) error + +// Creates a new Client, with reasonable defaults +func NewClient(server string, opts ...ClientOption) (*Client, error) { + // create a client with sane default values + client := Client{ + Server: server, + } + // mutate client and add all optional params + for _, o := range opts { + if err := o(&client); err != nil { + return nil, err + } + } + // ensure the server URL always has a trailing slash + if !strings.HasSuffix(client.Server, "/") { + client.Server += "/" + } + // create httpClient, if not already present + if client.Client == nil { + client.Client = &http.Client{} + } + return &client, nil +} + +// WithHTTPClient allows overriding the default Doer, which is +// automatically created using http.Client. This is useful for tests. +func WithHTTPClient(doer HttpRequestDoer) ClientOption { + return func(c *Client) error { + c.Client = doer + return nil + } +} + +// WithRequestEditorFn allows setting up a callback function, which will be +// called right before sending the request. This can be used to mutate the request. +func WithRequestEditorFn(fn RequestEditorFn) ClientOption { + return func(c *Client) error { + c.RequestEditors = append(c.RequestEditors, fn) + return nil + } +} + +// The interface specification for the client above. 
+type ClientInterface interface { + // ListBulkImports request + ListBulkImports(ctx context.Context, params *ListBulkImportsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // StartBulkImportWithBody request with any body + StartBulkImportWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + StartBulkImport(ctx context.Context, body StartBulkImportJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CancelBulkImport request + CancelBulkImport(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DescribeBulkImport request + DescribeBulkImport(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DescribeIndexStatsWithBody request with any body + DescribeIndexStatsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DescribeIndexStats(ctx context.Context, body DescribeIndexStatsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // QueryVectorsWithBody request with any body + QueryVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + QueryVectors(ctx context.Context, body QueryVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteVectorsWithBody request with any body + DeleteVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteVectors(ctx context.Context, body DeleteVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FetchVectors request + FetchVectors(ctx context.Context, params *FetchVectorsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ListVectors request + ListVectors(ctx context.Context, params *ListVectorsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateVectorWithBody request with any body + UpdateVectorWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateVector(ctx context.Context, body UpdateVectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpsertVectorsWithBody request with any body + UpsertVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpsertVectors(ctx context.Context, body UpsertVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) +} + +func (c *Client) ListBulkImports(ctx context.Context, params *ListBulkImportsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewListBulkImportsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) StartBulkImportWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewStartBulkImportRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) StartBulkImport(ctx context.Context, body 
StartBulkImportJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewStartBulkImportRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CancelBulkImport(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCancelBulkImportRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DescribeBulkImport(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDescribeBulkImportRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DescribeIndexStatsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDescribeIndexStatsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DescribeIndexStats(ctx context.Context, body DescribeIndexStatsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDescribeIndexStatsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) QueryVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewQueryVectorsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) QueryVectors(ctx context.Context, body QueryVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewQueryVectorsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteVectorsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteVectors(ctx context.Context, body DeleteVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteVectorsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FetchVectors(ctx context.Context, params 
*FetchVectorsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFetchVectorsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ListVectors(ctx context.Context, params *ListVectorsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewListVectorsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateVectorWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateVectorRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateVector(ctx context.Context, body UpdateVectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateVectorRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpsertVectorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpsertVectorsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpsertVectors(ctx context.Context, body UpsertVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpsertVectorsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +// NewListBulkImportsRequest generates requests for ListBulkImports +func NewListBulkImportsRequest(server string, params *ListBulkImportsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/bulk/imports") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PaginationToken != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "paginationToken", runtime.ParamLocationQuery, *params.PaginationToken); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewStartBulkImportRequest calls the generic StartBulkImport builder with application/json body +func NewStartBulkImportRequest(server string, body StartBulkImportJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewStartBulkImportRequestWithBody(server, "application/json", bodyReader) +} + +// NewStartBulkImportRequestWithBody generates requests for StartBulkImport with any type of body +func NewStartBulkImportRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/bulk/imports") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCancelBulkImportRequest generates requests for CancelBulkImport +func NewCancelBulkImportRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/bulk/imports/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDescribeBulkImportRequest generates requests for DescribeBulkImport +func NewDescribeBulkImportRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/bulk/imports/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDescribeIndexStatsRequest calls the generic DescribeIndexStats builder with application/json body +func NewDescribeIndexStatsRequest(server string, body DescribeIndexStatsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDescribeIndexStatsRequestWithBody(server, "application/json", bodyReader) +} + +// NewDescribeIndexStatsRequestWithBody generates requests for DescribeIndexStats with any type of body +func NewDescribeIndexStatsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/describe_index_stats") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewQueryVectorsRequest calls the generic QueryVectors builder with application/json body +func NewQueryVectorsRequest(server string, body QueryVectorsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewQueryVectorsRequestWithBody(server, "application/json", bodyReader) +} + +// NewQueryVectorsRequestWithBody generates requests for QueryVectors with any type of body +func NewQueryVectorsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/query") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteVectorsRequest calls the generic DeleteVectors builder with application/json body +func NewDeleteVectorsRequest(server string, body DeleteVectorsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteVectorsRequestWithBody(server, "application/json", bodyReader) +} + +// NewDeleteVectorsRequestWithBody generates requests for DeleteVectors with any type of body +func NewDeleteVectorsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/vectors/delete") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFetchVectorsRequest generates requests for FetchVectors +func NewFetchVectorsRequest(server string, params *FetchVectorsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/vectors/fetch") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ids", runtime.ParamLocationQuery, params.Ids); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.Namespace != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace", runtime.ParamLocationQuery, *params.Namespace); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewListVectorsRequest generates requests for ListVectors +func NewListVectorsRequest(server string, params *ListVectorsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/vectors/list") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Prefix != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prefix", runtime.ParamLocationQuery, *params.Prefix); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PaginationToken != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "paginationToken", runtime.ParamLocationQuery, *params.PaginationToken); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Namespace != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace", runtime.ParamLocationQuery, *params.Namespace); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateVectorRequest calls the generic UpdateVector builder with application/json body +func NewUpdateVectorRequest(server string, body UpdateVectorJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateVectorRequestWithBody(server, "application/json", bodyReader) +} + +// NewUpdateVectorRequestWithBody generates requests for UpdateVector with any type of body +func NewUpdateVectorRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/vectors/update") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewUpsertVectorsRequest calls the generic UpsertVectors builder with application/json body +func NewUpsertVectorsRequest(server string, body UpsertVectorsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpsertVectorsRequestWithBody(server, "application/json", bodyReader) +} + +// NewUpsertVectorsRequestWithBody generates requests for UpsertVectors with any type of body +func NewUpsertVectorsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/vectors/upsert") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +func (c *Client) applyEditors(ctx context.Context, req *http.Request, additionalEditors []RequestEditorFn) error { + for _, r := range c.RequestEditors { + if err := r(ctx, req); err != nil { + return err + } + } + for _, r := range additionalEditors { + if err := r(ctx, req); err != nil { + return err + } + } + return nil +} + +// ClientWithResponses builds on ClientInterface to offer response payloads +type ClientWithResponses struct { + ClientInterface +} + +// NewClientWithResponses creates a new ClientWithResponses, which wraps +// Client with return type handling +func NewClientWithResponses(server string, opts ...ClientOption) (*ClientWithResponses, error) { + client, err := NewClient(server, opts...) + if err != nil { + return nil, err + } + return &ClientWithResponses{client}, nil +} + +// WithBaseURL overrides the baseURL. +func WithBaseURL(baseURL string) ClientOption { + return func(c *Client) error { + newBaseURL, err := url.Parse(baseURL) + if err != nil { + return err + } + c.Server = newBaseURL.String() + return nil + } +} + +// ClientWithResponsesInterface is the interface specification for the client with responses above. 
+type ClientWithResponsesInterface interface { + // ListBulkImportsWithResponse request + ListBulkImportsWithResponse(ctx context.Context, params *ListBulkImportsParams, reqEditors ...RequestEditorFn) (*ListBulkImportsResponse, error) + + // StartBulkImportWithBodyWithResponse request with any body + StartBulkImportWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*StartBulkImportResponse, error) + + StartBulkImportWithResponse(ctx context.Context, body StartBulkImportJSONRequestBody, reqEditors ...RequestEditorFn) (*StartBulkImportResponse, error) + + // CancelBulkImportWithResponse request + CancelBulkImportWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*CancelBulkImportResponse, error) + + // DescribeBulkImportWithResponse request + DescribeBulkImportWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DescribeBulkImportResponse, error) + + // DescribeIndexStatsWithBodyWithResponse request with any body + DescribeIndexStatsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DescribeIndexStatsResponse, error) + + DescribeIndexStatsWithResponse(ctx context.Context, body DescribeIndexStatsJSONRequestBody, reqEditors ...RequestEditorFn) (*DescribeIndexStatsResponse, error) + + // QueryVectorsWithBodyWithResponse request with any body + QueryVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*QueryVectorsResponse, error) + + QueryVectorsWithResponse(ctx context.Context, body QueryVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*QueryVectorsResponse, error) + + // DeleteVectorsWithBodyWithResponse request with any body + DeleteVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteVectorsResponse, error) + + DeleteVectorsWithResponse(ctx context.Context, body DeleteVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteVectorsResponse, error) + + // FetchVectorsWithResponse request + FetchVectorsWithResponse(ctx context.Context, params *FetchVectorsParams, reqEditors ...RequestEditorFn) (*FetchVectorsResponse, error) + + // ListVectorsWithResponse request + ListVectorsWithResponse(ctx context.Context, params *ListVectorsParams, reqEditors ...RequestEditorFn) (*ListVectorsResponse, error) + + // UpdateVectorWithBodyWithResponse request with any body + UpdateVectorWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateVectorResponse, error) + + UpdateVectorWithResponse(ctx context.Context, body UpdateVectorJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateVectorResponse, error) + + // UpsertVectorsWithBodyWithResponse request with any body + UpsertVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpsertVectorsResponse, error) + + UpsertVectorsWithResponse(ctx context.Context, body UpsertVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*UpsertVectorsResponse, error) +} + +type ListBulkImportsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ListImportsResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r ListBulkImportsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + 
+// StatusCode returns HTTPResponse.StatusCode +func (r ListBulkImportsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type StartBulkImportResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *StartImportResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r StartBulkImportResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r StartBulkImportResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CancelBulkImportResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CancelImportResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r CancelBulkImportResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CancelBulkImportResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DescribeBulkImportResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ImportModel + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r DescribeBulkImportResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DescribeBulkImportResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DescribeIndexStatsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *IndexDescription + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r DescribeIndexStatsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DescribeIndexStatsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type QueryVectorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *QueryResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r QueryVectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r QueryVectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteVectorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *DeleteResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r DeleteVectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteVectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FetchVectorsResponse struct { + Body []byte + 
HTTPResponse *http.Response + JSON200 *FetchResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r FetchVectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FetchVectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ListVectorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ListResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r ListVectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ListVectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateVectorResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *UpdateResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r UpdateVectorResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateVectorResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpsertVectorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *UpsertResponse + JSON400 *RpcStatus + JSON4XX *RpcStatus + JSON5XX *RpcStatus +} + +// Status returns HTTPResponse.Status +func (r UpsertVectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpsertVectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +// ListBulkImportsWithResponse request returning *ListBulkImportsResponse +func (c *ClientWithResponses) ListBulkImportsWithResponse(ctx context.Context, params *ListBulkImportsParams, reqEditors ...RequestEditorFn) (*ListBulkImportsResponse, error) { + rsp, err := c.ListBulkImports(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseListBulkImportsResponse(rsp) +} + +// StartBulkImportWithBodyWithResponse request with arbitrary body returning *StartBulkImportResponse +func (c *ClientWithResponses) StartBulkImportWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*StartBulkImportResponse, error) { + rsp, err := c.StartBulkImportWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseStartBulkImportResponse(rsp) +} + +func (c *ClientWithResponses) StartBulkImportWithResponse(ctx context.Context, body StartBulkImportJSONRequestBody, reqEditors ...RequestEditorFn) (*StartBulkImportResponse, error) { + rsp, err := c.StartBulkImport(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseStartBulkImportResponse(rsp) +} + +// CancelBulkImportWithResponse request returning *CancelBulkImportResponse +func (c *ClientWithResponses) CancelBulkImportWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*CancelBulkImportResponse, error) { + rsp, err := c.CancelBulkImport(ctx, id, reqEditors...) + if err != nil { + return nil, err + } + return ParseCancelBulkImportResponse(rsp) +} + +// DescribeBulkImportWithResponse request returning *DescribeBulkImportResponse +func (c *ClientWithResponses) DescribeBulkImportWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DescribeBulkImportResponse, error) { + rsp, err := c.DescribeBulkImport(ctx, id, reqEditors...) + if err != nil { + return nil, err + } + return ParseDescribeBulkImportResponse(rsp) +} + +// DescribeIndexStatsWithBodyWithResponse request with arbitrary body returning *DescribeIndexStatsResponse +func (c *ClientWithResponses) DescribeIndexStatsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DescribeIndexStatsResponse, error) { + rsp, err := c.DescribeIndexStatsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseDescribeIndexStatsResponse(rsp) +} + +func (c *ClientWithResponses) DescribeIndexStatsWithResponse(ctx context.Context, body DescribeIndexStatsJSONRequestBody, reqEditors ...RequestEditorFn) (*DescribeIndexStatsResponse, error) { + rsp, err := c.DescribeIndexStats(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseDescribeIndexStatsResponse(rsp) +} + +// QueryVectorsWithBodyWithResponse request with arbitrary body returning *QueryVectorsResponse +func (c *ClientWithResponses) QueryVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*QueryVectorsResponse, error) { + rsp, err := c.QueryVectorsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseQueryVectorsResponse(rsp) +} + +func (c *ClientWithResponses) QueryVectorsWithResponse(ctx context.Context, body QueryVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*QueryVectorsResponse, error) { + rsp, err := c.QueryVectors(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseQueryVectorsResponse(rsp) +} + +// DeleteVectorsWithBodyWithResponse request with arbitrary body returning *DeleteVectorsResponse +func (c *ClientWithResponses) DeleteVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteVectorsResponse, error) { + rsp, err := c.DeleteVectorsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteVectorsResponse(rsp) +} + +func (c *ClientWithResponses) DeleteVectorsWithResponse(ctx context.Context, body DeleteVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteVectorsResponse, error) { + rsp, err := c.DeleteVectors(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteVectorsResponse(rsp) +} + +// FetchVectorsWithResponse request returning *FetchVectorsResponse +func (c *ClientWithResponses) FetchVectorsWithResponse(ctx context.Context, params *FetchVectorsParams, reqEditors ...RequestEditorFn) (*FetchVectorsResponse, error) { + rsp, err := c.FetchVectors(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseFetchVectorsResponse(rsp) +} + +// ListVectorsWithResponse request returning *ListVectorsResponse +func (c *ClientWithResponses) ListVectorsWithResponse(ctx context.Context, params *ListVectorsParams, reqEditors ...RequestEditorFn) (*ListVectorsResponse, error) { + rsp, err := c.ListVectors(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseListVectorsResponse(rsp) +} + +// UpdateVectorWithBodyWithResponse request with arbitrary body returning *UpdateVectorResponse +func (c *ClientWithResponses) UpdateVectorWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateVectorResponse, error) { + rsp, err := c.UpdateVectorWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateVectorResponse(rsp) +} + +func (c *ClientWithResponses) UpdateVectorWithResponse(ctx context.Context, body UpdateVectorJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateVectorResponse, error) { + rsp, err := c.UpdateVector(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpdateVectorResponse(rsp) +} + +// UpsertVectorsWithBodyWithResponse request with arbitrary body returning *UpsertVectorsResponse +func (c *ClientWithResponses) UpsertVectorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpsertVectorsResponse, error) { + rsp, err := c.UpsertVectorsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseUpsertVectorsResponse(rsp) +} + +func (c *ClientWithResponses) UpsertVectorsWithResponse(ctx context.Context, body UpsertVectorsJSONRequestBody, reqEditors ...RequestEditorFn) (*UpsertVectorsResponse, error) { + rsp, err := c.UpsertVectors(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseUpsertVectorsResponse(rsp) +} + +// ParseListBulkImportsResponse parses an HTTP response from a ListBulkImportsWithResponse call +func ParseListBulkImportsResponse(rsp *http.Response) (*ListBulkImportsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &ListBulkImportsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ListImportsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseStartBulkImportResponse parses an HTTP response from a StartBulkImportWithResponse call +func ParseStartBulkImportResponse(rsp *http.Response) (*StartBulkImportResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &StartBulkImportResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest StartImportResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseCancelBulkImportResponse parses an HTTP response from a CancelBulkImportWithResponse call +func ParseCancelBulkImportResponse(rsp *http.Response) (*CancelBulkImportResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &CancelBulkImportResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CancelImportResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseDescribeBulkImportResponse parses an HTTP response from a DescribeBulkImportWithResponse call +func ParseDescribeBulkImportResponse(rsp *http.Response) (*DescribeBulkImportResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DescribeBulkImportResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ImportModel + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseDescribeIndexStatsResponse parses an HTTP response from a DescribeIndexStatsWithResponse call +func ParseDescribeIndexStatsResponse(rsp *http.Response) (*DescribeIndexStatsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DescribeIndexStatsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest IndexDescription + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseQueryVectorsResponse parses an HTTP response from a QueryVectorsWithResponse call +func ParseQueryVectorsResponse(rsp *http.Response) (*QueryVectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != 
nil { + return nil, err + } + + response := &QueryVectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest QueryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseDeleteVectorsResponse parses an HTTP response from a DeleteVectorsWithResponse call +func ParseDeleteVectorsResponse(rsp *http.Response) (*DeleteVectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DeleteVectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest DeleteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseFetchVectorsResponse parses an HTTP response from a FetchVectorsWithResponse call +func ParseFetchVectorsResponse(rsp *http.Response) (*FetchVectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &FetchVectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest FetchResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus 
+ if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseListVectorsResponse parses an HTTP response from a ListVectorsWithResponse call +func ParseListVectorsResponse(rsp *http.Response) (*ListVectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &ListVectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ListResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseUpdateVectorResponse parses an HTTP response from a UpdateVectorWithResponse call +func ParseUpdateVectorResponse(rsp *http.Response) (*UpdateVectorResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &UpdateVectorResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest UpdateResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} + +// ParseUpsertVectorsResponse parses an HTTP response from a UpsertVectorsWithResponse call +func ParseUpsertVectorsResponse(rsp *http.Response) (*UpsertVectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &UpsertVectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest UpsertResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest RpcStatus + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 4: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON4XX = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode/100 == 5: + var dest RpcStatus + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON5XX = &dest + + } + + return response, nil +} diff --git a/internal/gen/inference/inference_2024-10.oas.go b/internal/gen/inference/inference_2024-10.oas.go index 580b724..13b2a59 100644 --- a/internal/gen/inference/inference_2024-10.oas.go +++ b/internal/gen/inference/inference_2024-10.oas.go @@ -45,12 +45,12 @@ type Document map[string]string // EmbedRequest defines model for EmbedRequest. type EmbedRequest struct { - // Inputs List of inputs to generate embeddings for that varies by model + // Inputs List of inputs to generate embeddings for. Inputs []struct { Text *string `json:"text,omitempty"` } `json:"inputs"` - // Model Model name to use for embedding generation. + // Model The [model](https://docs.pinecone.io/guides/inference/understanding-inference#models) to use for embedding generation. Model string `json:"model"` // Parameters Model-specific parameters. @@ -58,7 +58,7 @@ type EmbedRequest struct { // InputType Common property used to distinguish between types of data. InputType *string `json:"input_type,omitempty"` - // Truncate How to handle inputs longer than those supported by the model. If NONE, when the input exceeds the maximum input token length an error will be returned. + // Truncate How to handle inputs longer than those supported by the model. If `"END"`, truncate the input sequence at the token limit. If `"NONE"`, return an error when the input exceeds the token limit. Truncate *string `json:"truncate,omitempty"` } `json:"parameters,omitempty"` } @@ -71,15 +71,15 @@ type Embedding struct { // EmbeddingsList Embeddings generated for the input type EmbeddingsList struct { - // Data The embeddings generated for the inputs + // Data The embeddings generated for the inputs. Data []Embedding `json:"data"` // Model The model used to generate the embeddings Model string `json:"model"` - // Usage Usage statistics for model inference including any instruction prefixes + // Usage Usage statistics for the model inference. Usage struct { - // TotalTokens Total number tokens consumed across all inputs + // TotalTokens Total number of tokens consumed across all inputs. TotalTokens *int `json:"total_tokens,omitempty"` } `json:"usage"` } @@ -102,33 +102,47 @@ type ErrorResponse struct { // ErrorResponseErrorCode defines model for ErrorResponse.Error.Code. type ErrorResponseErrorCode string -// RankResult Reranking score of single input -type RankResult struct { +// RankedDocument A ranked document with a relevance score and an index position. +type RankedDocument struct { // Document Document for reranking Document *Document `json:"document,omitempty"` - // Index The index of the document in the input list + // Index The index of the document Index int `json:"index"` // Score The relevance score of the document normalized between 0 and 1. Score float32 `json:"score"` } +// RerankResult The result of a reranking request. +type RerankResult struct { + // Data The reranked documents. 
+ Data []RankedDocument `json:"data"` + + // Model The model used to rerank documents. + Model string `json:"model"` + + // Usage Usage statistics for the model inference. + Usage struct { + RerankUnits *int `json:"rerank_units,omitempty"` + } `json:"usage"` +} + // RerankJSONBody defines parameters for Rerank. type RerankJSONBody struct { // Documents The documents to rerank. Documents []Document `json:"documents"` - // Model Model to use for reranking. + // Model The [model](https://docs.pinecone.io/guides/inference/understanding-inference#models) to use for reranking. Model string `json:"model"` - // Parameters Additional model specific parameters for the reranker. + // Parameters Additional model-specific parameters for the reranker. Parameters *map[string]string `json:"parameters,omitempty"` // Query The query to rerank documents against. Query string `json:"query"` - // RankFields The fields to rank the documents by. If not provided the default is text. + // RankFields The fields to rank the documents by. If not provided, the default is `"text"`. RankFields *[]string `json:"rank_fields,omitempty"` // ReturnDocuments Whether to return the documents in the response. @@ -438,21 +452,10 @@ func (r EmbedResponse) StatusCode() int { type RerankResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *struct { - // Data The reranked documents - Data []RankResult `json:"data"` - - // Model The model used for reranking - Model string `json:"model"` - - // Usage Usage statistics for the inference - Usage struct { - RerankUnits *int `json:"rerank_units,omitempty"` - } `json:"usage"` - } - JSON400 *ErrorResponse - JSON401 *ErrorResponse - JSON500 *ErrorResponse + JSON200 *RerankResult + JSON400 *ErrorResponse + JSON401 *ErrorResponse + JSON500 *ErrorResponse } // Status returns HTTPResponse.Status @@ -567,18 +570,7 @@ func ParseRerankResponse(rsp *http.Response) (*RerankResponse, error) { switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - // Data The reranked documents - Data []RankResult `json:"data"` - - // Model The model used for reranking - Model string `json:"model"` - - // Usage Usage statistics for the inference - Usage struct { - RerankUnits *int `json:"rerank_units,omitempty"` - } `json:"usage"` - } + var dest RerankResult if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } diff --git a/pinecone/client.go b/pinecone/client.go index 72ce15a..01e1c06 100644 --- a/pinecone/client.go +++ b/pinecone/client.go @@ -16,6 +16,7 @@ import ( "github.com/pinecone-io/go-pinecone/internal/gen" "github.com/pinecone-io/go-pinecone/internal/gen/db_control" + db_data_rest "github.com/pinecone-io/go-pinecone/internal/gen/db_data/rest" "github.com/pinecone-io/go-pinecone/internal/gen/inference" "github.com/pinecone-io/go-pinecone/internal/provider" "github.com/pinecone-io/go-pinecone/internal/useragent" @@ -32,13 +33,10 @@ import ( // // Fields: // - Inference: An InferenceService object that exposes methods for interacting with the Pinecone [Inference API]. -// - headers: An optional map of HTTP headers to include in each API request, provided through -// NewClientParams.Headers or NewClientBaseParams.Headers. // - restClient: Optional underlying *http.Client object used to communicate with the Pinecone API, // provided through NewClientParams.RestClient or NewClientBaseParams.RestClient. If not provided, // a default client is created for you. 
-// - sourceTag: An optional string used to help Pinecone attribute API activity, provided through NewClientParams.SourceTag
-// or NewClientBaseParams.SourceTag.
+// - baseParams: A NewClientBaseParams object that holds the configuration for the Pinecone client.
 //
 // Example:
 //
@@ -72,9 +70,8 @@ import (
 // [Inference API]: https://docs.pinecone.io/reference/api/2024-07/inference/generate-embeddings
 type Client struct {
 	Inference  *InferenceService
-	headers    map[string]string
 	restClient *db_control.Client
-	sourceTag  string
+	baseParams *NewClientBaseParams
 }
 
 // NewClientParams holds the parameters for creating a new Client instance while authenticating via an API key.
@@ -210,8 +207,8 @@ func NewClient(in NewClientParams) (*Client, error) {
 //	   fmt.Println("Successfully created a new Client object!")
 //	}
 func NewClientBase(in NewClientBaseParams) (*Client, error) {
-	clientOptions := buildClientBaseOptions(in)
-	inference_client_options := buildInferenceBaseOptions(in)
+	controlOptions := buildClientBaseOptions(in)
+	inferenceOptions := buildInferenceBaseOptions(in)
 	var err error
 
 	controlHostOverride := valueOrFallback(in.Host, os.Getenv("PINECONE_CONTROLLER_HOST"))
@@ -222,16 +219,20 @@ func NewClientBase(in NewClientBaseParams) (*Client, error) {
 		}
 	}
 
-	db_control_client, err := db_control.NewClient(valueOrFallback(controlHostOverride, "https://api.pinecone.io"), clientOptions...)
+	dbControlClient, err := db_control.NewClient(valueOrFallback(controlHostOverride, "https://api.pinecone.io"), controlOptions...)
 	if err != nil {
 		return nil, err
 	}
 
-	inference_client, err := inference.NewClient(valueOrFallback(controlHostOverride, "https://api.pinecone.io"), inference_client_options...)
+	inferenceClient, err := inference.NewClient(valueOrFallback(controlHostOverride, "https://api.pinecone.io"), inferenceOptions...)
 	if err != nil {
 		return nil, err
	}
 
-	c := Client{Inference: &InferenceService{client: inference_client}, restClient: db_control_client, sourceTag: in.SourceTag, headers: in.Headers}
+	c := Client{
+		Inference:  &InferenceService{client: inferenceClient},
+		restClient: dbControlClient,
+		baseParams: &in,
+	}
 	return &c, nil
 }
 
@@ -304,11 +305,18 @@ func (c *Client) Index(in NewIndexConnParams, dialOpts ...grpc.DialOption) (*Ind
 		in.AdditionalMetadata[key] = value
 	}
 
+	dbDataOptions := buildDataClientBaseOptions(*c.baseParams)
+	dbDataClient, err := db_data_rest.NewClient(ensureHostHasHttps(in.Host), dbDataOptions...)
+	if err != nil {
+		return nil, err
+	}
+
 	idx, err := newIndexConnection(newIndexParameters{
 		host:               in.Host,
 		namespace:          in.Namespace,
-		sourceTag:          c.sourceTag,
+		sourceTag:          c.baseParams.SourceTag,
 		additionalMetadata: in.AdditionalMetadata,
+		dbDataClient:       dbDataClient,
 	}, dialOpts...)
 	if err != nil {
 		return nil, err
@@ -316,6 +324,16 @@ func (c *Client) Index(in NewIndexConnParams, dialOpts ...grpc.DialOption) (*Ind
 	return idx, nil
 }
 
+func ensureHostHasHttps(host string) string {
+	if strings.HasPrefix(host, "http://") {
+		return strings.Replace(host, "http://", "https://", 1)
+	} else if !strings.HasPrefix(host, "https://") {
+		return "https://" + host
+	}
+
+	return host
+}
+
 // ListIndexes retrieves a list of all Indexes in a Pinecone [project].
// // Parameters: @@ -1332,7 +1350,7 @@ func (c *Client) extractAuthHeader() map[string]string { "access_token", } - for key, value := range c.headers { + for key, value := range c.baseParams.Headers { for _, checkKey := range possibleAuthKeys { if strings.ToLower(key) == checkKey { return map[string]string{key: value} @@ -1525,6 +1543,22 @@ func buildInferenceBaseOptions(in NewClientBaseParams) []inference.ClientOption return clientOptions } +func buildDataClientBaseOptions(in NewClientBaseParams) []db_data_rest.ClientOption { + clientOptions := []db_data_rest.ClientOption{} + headerProviders := buildSharedProviderHeaders(in) + + for _, provider := range headerProviders { + clientOptions = append(clientOptions, db_data_rest.WithRequestEditorFn(provider.Intercept)) + } + + // apply custom http client if provided + if in.RestClient != nil { + clientOptions = append(clientOptions, db_data_rest.WithHTTPClient(in.RestClient)) + } + + return clientOptions +} + func buildSharedProviderHeaders(in NewClientBaseParams) []*provider.CustomHeader { providers := []*provider.CustomHeader{} diff --git a/pinecone/client_test.go b/pinecone/client_test.go index 17200e1..7771849 100644 --- a/pinecone/client_test.go +++ b/pinecone/client_test.go @@ -388,9 +388,9 @@ func TestNewClientParamsSetUnit(t *testing.T) { client, err := NewClient(NewClientParams{ApiKey: apiKey}) require.NoError(t, err) - require.Empty(t, client.sourceTag, "Expected client to have empty sourceTag") - require.NotNil(t, client.headers, "Expected client headers to not be nil") - apiKeyHeader, ok := client.headers["Api-Key"] + require.Empty(t, client.baseParams.SourceTag, "Expected client to have empty sourceTag") + require.NotNil(t, client.baseParams.Headers, "Expected client headers to not be nil") + apiKeyHeader, ok := client.baseParams.Headers["Api-Key"] require.True(t, ok, "Expected client to have an 'Api-Key' header") require.Equal(t, apiKey, apiKeyHeader, "Expected 'Api-Key' header to match provided ApiKey") require.Equal(t, 3, len(client.restClient.RequestEditors), "Expected client to have correct number of request editors") @@ -405,10 +405,10 @@ func TestNewClientParamsSetSourceTagUnit(t *testing.T) { }) require.NoError(t, err) - apiKeyHeader, ok := client.headers["Api-Key"] + apiKeyHeader, ok := client.baseParams.Headers["Api-Key"] require.True(t, ok, "Expected client to have an 'Api-Key' header") require.Equal(t, apiKey, apiKeyHeader, "Expected 'Api-Key' header to match provided ApiKey") - require.Equal(t, sourceTag, client.sourceTag, "Expected client to have sourceTag '%s', but got '%s'", sourceTag, client.sourceTag) + require.Equal(t, sourceTag, client.baseParams.SourceTag, "Expected client to have sourceTag '%s', but got '%s'", sourceTag, client.baseParams.SourceTag) require.Equal(t, 3, len(client.restClient.RequestEditors), "Expected client to have %s request editors, but got %s", 2, len(client.restClient.RequestEditors)) } @@ -418,10 +418,10 @@ func TestNewClientParamsSetHeadersUnit(t *testing.T) { client, err := NewClient(NewClientParams{ApiKey: apiKey, Headers: headers}) require.NoError(t, err) - apiKeyHeader, ok := client.headers["Api-Key"] + apiKeyHeader, ok := client.baseParams.Headers["Api-Key"] require.True(t, ok, "Expected client to have an 'Api-Key' header") require.Equal(t, apiKey, apiKeyHeader, "Expected 'Api-Key' header to match provided ApiKey") - require.Equal(t, client.headers, headers, "Expected client to have headers '%+v', but got '%+v'", headers, client.headers) + require.Equal(t, 
client.baseParams.Headers, headers, "Expected client to have headers '%+v', but got '%+v'", headers, client.baseParams.Headers) require.Equal(t, 4, len(client.restClient.RequestEditors), "Expected client to have %s request editors, but got %s", 3, len(client.restClient.RequestEditors)) } @@ -1072,7 +1072,7 @@ func TestNewClientUnit(t *testing.T) { } else { assert.NoError(t, err) assert.NotNil(t, client) - assert.Equal(t, tc.expectedHeaders, client.headers, "Expected headers to be '%v', but got '%v'", tc.expectedHeaders, client.headers) + assert.Equal(t, tc.expectedHeaders, client.baseParams.Headers, "Expected headers to be '%v', but got '%v'", tc.expectedHeaders, client.baseParams.Headers) } }) } diff --git a/pinecone/index_connection.go b/pinecone/index_connection.go index 3fc5202..a850be1 100644 --- a/pinecone/index_connection.go +++ b/pinecone/index_connection.go @@ -3,12 +3,16 @@ package pinecone import ( "context" "crypto/tls" + "encoding/json" "fmt" + "io" "log" + "net/http" "net/url" "strings" - "github.com/pinecone-io/go-pinecone/internal/gen/data" + db_data_grpc "github.com/pinecone-io/go-pinecone/internal/gen/db_data/grpc" + db_data_rest "github.com/pinecone-io/go-pinecone/internal/gen/db_data/rest" "github.com/pinecone-io/go-pinecone/internal/useragent" "google.golang.org/grpc" "google.golang.org/grpc/credentials" @@ -26,7 +30,8 @@ import ( type IndexConnection struct { Namespace string additionalMetadata map[string]string - dataClient *data.VectorServiceClient + restClient *db_data_rest.Client + grpcClient *db_data_grpc.VectorServiceClient grpcConn *grpc.ClientConn } @@ -35,6 +40,7 @@ type newIndexParameters struct { namespace string sourceTag string additionalMetadata map[string]string + dbDataClient *db_data_rest.Client } func newIndexConnection(in newIndexParameters, dialOpts ...grpc.DialOption) (*IndexConnection, error) { @@ -65,11 +71,12 @@ func newIndexConnection(in newIndexParameters, dialOpts ...grpc.DialOption) (*In return nil, err } - dataClient := data.NewVectorServiceClient(conn) + dataClient := db_data_grpc.NewVectorServiceClient(conn) idx := IndexConnection{ Namespace: in.namespace, - dataClient: &dataClient, + restClient: in.dbDataClient, + grpcClient: &dataClient, grpcConn: conn, additionalMetadata: in.additionalMetadata, } @@ -185,17 +192,17 @@ func (idx *IndexConnection) Close() error { // log.Fatalf("Successfully upserted %d vector(s)!\n", count) // } func (idx *IndexConnection) UpsertVectors(ctx context.Context, in []*Vector) (uint32, error) { - vectors := make([]*data.Vector, len(in)) + vectors := make([]*db_data_grpc.Vector, len(in)) for i, v := range in { vectors[i] = vecToGrpc(v) } - req := &data.UpsertRequest{ + req := &db_data_grpc.UpsertRequest{ Vectors: vectors, Namespace: idx.Namespace, } - res, err := (*idx.dataClient).Upsert(idx.akCtx(ctx), req) + res, err := (*idx.grpcClient).Upsert(idx.akCtx(ctx), req) if err != nil { return 0, err } @@ -263,12 +270,12 @@ type FetchVectorsResponse struct { // fmt.Println("No vectors found") // } func (idx *IndexConnection) FetchVectors(ctx context.Context, ids []string) (*FetchVectorsResponse, error) { - req := &data.FetchRequest{ + req := &db_data_grpc.FetchRequest{ Ids: ids, Namespace: idx.Namespace, } - res, err := (*idx.dataClient).Fetch(idx.akCtx(ctx), req) + res, err := (*idx.grpcClient).Fetch(idx.akCtx(ctx), req) if err != nil { return nil, err } @@ -371,13 +378,13 @@ type ListVectorsResponse struct { // fmt.Printf("Found %d vector(s)\n", len(res.VectorIds)) // } func (idx *IndexConnection) 
ListVectors(ctx context.Context, in *ListVectorsRequest) (*ListVectorsResponse, error) { - req := &data.ListRequest{ + req := &db_data_grpc.ListRequest{ Prefix: in.Prefix, Limit: in.Limit, PaginationToken: in.PaginationToken, Namespace: idx.Namespace, } - res, err := (*idx.dataClient).List(idx.akCtx(ctx), req) + res, err := (*idx.grpcClient).List(idx.akCtx(ctx), req) if err != nil { return nil, err } @@ -390,7 +397,7 @@ func (idx *IndexConnection) ListVectors(ctx context.Context, in *ListVectorsRequ return &ListVectorsResponse{ VectorIds: vectorIds, Usage: toUsage(res.Usage), - NextPaginationToken: toPaginationToken(res.Pagination), + NextPaginationToken: toPaginationTokenGrpc(res.Pagination), Namespace: idx.Namespace, }, nil } @@ -501,7 +508,7 @@ type QueryVectorsResponse struct { // } // } func (idx *IndexConnection) QueryByVectorValues(ctx context.Context, in *QueryByVectorValuesRequest) (*QueryVectorsResponse, error) { - req := &data.QueryRequest{ + req := &db_data_grpc.QueryRequest{ Namespace: idx.Namespace, TopK: in.TopK, Filter: in.MetadataFilter, @@ -527,7 +534,7 @@ func (idx *IndexConnection) QueryByVectorValues(ctx context.Context, in *QueryBy type QueryByVectorIdRequest struct { VectorId string TopK uint32 - metadataFilter *MetadataFilter + MetadataFilter *MetadataFilter IncludeValues bool IncludeMetadata bool SparseValues *SparseValues @@ -591,11 +598,11 @@ type QueryByVectorIdRequest struct { // } // } func (idx *IndexConnection) QueryByVectorId(ctx context.Context, in *QueryByVectorIdRequest) (*QueryVectorsResponse, error) { - req := &data.QueryRequest{ + req := &db_data_grpc.QueryRequest{ Id: in.VectorId, Namespace: idx.Namespace, TopK: in.TopK, - Filter: in.metadataFilter, + Filter: in.MetadataFilter, IncludeValues: in.IncludeValues, IncludeMetadata: in.IncludeMetadata, SparseVector: sparseValToGrpc(in.SparseValues), @@ -652,7 +659,7 @@ func (idx *IndexConnection) QueryByVectorId(ctx context.Context, in *QueryByVect // log.Fatalf("Failed to delete vector with ID: %s. Error: %s\n", vectorId, err) // } func (idx *IndexConnection) DeleteVectorsById(ctx context.Context, ids []string) error { - req := data.DeleteRequest{ + req := db_data_grpc.DeleteRequest{ Ids: ids, Namespace: idx.Namespace, } @@ -716,7 +723,7 @@ func (idx *IndexConnection) DeleteVectorsById(ctx context.Context, ids []string) // log.Fatalf("Failed to delete vector(s) with filter: %+v. Error: %s\n", filter, err) // } func (idx *IndexConnection) DeleteVectorsByFilter(ctx context.Context, metadataFilter *MetadataFilter) error { - req := data.DeleteRequest{ + req := db_data_grpc.DeleteRequest{ Filter: metadataFilter, Namespace: idx.Namespace, } @@ -768,7 +775,7 @@ func (idx *IndexConnection) DeleteVectorsByFilter(ctx context.Context, metadataF // log.Fatalf("Failed to delete vectors in namespace: \"%s\". 
Error: %s", idxConnection.Namespace, err) // } func (idx *IndexConnection) DeleteAllVectorsInNamespace(ctx context.Context) error { - req := data.DeleteRequest{ + req := db_data_grpc.DeleteRequest{ Namespace: idx.Namespace, DeleteAll: true, } @@ -842,7 +849,7 @@ func (idx *IndexConnection) UpdateVector(ctx context.Context, in *UpdateVectorRe return fmt.Errorf("a vector ID plus at least one of Values, SparseValues, or Metadata must be provided to update a vector") } - req := &data.UpdateRequest{ + req := &db_data_grpc.UpdateRequest{ Id: in.Id, Values: in.Values, SparseValues: sparseValToGrpc(in.SparseValues), @@ -850,7 +857,7 @@ func (idx *IndexConnection) UpdateVector(ctx context.Context, in *UpdateVectorRe Namespace: idx.Namespace, } - _, err := (*idx.dataClient).Update(idx.akCtx(ctx), req) + _, err := (*idx.grpcClient).Update(idx.akCtx(ctx), req) return err } @@ -973,10 +980,10 @@ func (idx *IndexConnection) DescribeIndexStats(ctx context.Context) (*DescribeIn // } // } func (idx *IndexConnection) DescribeIndexStatsFiltered(ctx context.Context, metadataFilter *MetadataFilter) (*DescribeIndexStatsResponse, error) { - req := &data.DescribeIndexStatsRequest{ + req := &db_data_grpc.DescribeIndexStatsRequest{ Filter: metadataFilter, } - res, err := (*idx.dataClient).DescribeIndexStats(idx.akCtx(ctx), req) + res, err := (*idx.grpcClient).DescribeIndexStats(idx.akCtx(ctx), req) if err != nil { return nil, err } @@ -996,8 +1003,309 @@ func (idx *IndexConnection) DescribeIndexStatsFiltered(ctx context.Context, meta }, nil } -func (idx *IndexConnection) query(ctx context.Context, req *data.QueryRequest) (*QueryVectorsResponse, error) { - res, err := (*idx.dataClient).Query(idx.akCtx(ctx), req) +// StartImportResponse holds the response parameters for the StartImport method. +// +// Fields: +// - Id: The ID of the import process that was started. +type StartImportResponse struct { + Id string `json:"id,omitempty"` +} + +// StartImport imports data from a storage provider into an index. The uri parameter must start with the +// scheme of a supported storage provider (e.g. "s3://"). For buckets that are not publicly readable, you will also need to +// separately configure a [storage integration] and pass the integration id. +// +// Returns a pointer to a StartImportResponse object with the import ID or an error if the request fails. +// +// Parameters: +// - ctx: A context.Context object controls the request's lifetime, +// allowing for the request to be canceled or to timeout according to the context's deadline. +// - uri: The URI of the data to import. The URI must start with the scheme of a supported storage provider. +// - integrationId: If your bucket requires authentication to access, you need to pass the id of your storage integration using this property. +// Pass nil if not required. +// - errorMode: If set to "continue", the import operation will continue even if some records fail to import. +// Pass "abort" to stop the import operation if any records fail. Will default to "continue" if nil is passed. +// +// Example: +// +// ctx := context.Background() +// +// clientParams := pinecone.NewClientParams{ +// ApiKey: "YOUR_API_KEY", +// SourceTag: "your_source_identifier", // optional +// } +// +// pc, err := pinecone.NewClient(clientParams) +// if err != nil { +// log.Fatalf("Failed to create Client: %v", err) +// } +// +// idx, err := pc.DescribeIndex(ctx, "your-index-name") +// if err != nil { +// log.Fatalf("Failed to describe index \"%s\". 
Error:%s", idx.Name, err) +// } +// +// idxConnection, err := pc.Index(pinecone.NewIndexConnParams{Host: idx.Host}) +// if err != nil { +// log.Fatalf("Failed to create IndexConnection for Host: %v. Error: %v", idx.Host, err) +// } +// +// uri := "s3://your-bucket/your-file.csv" +// errorMode := "abort" +// importRes, err := idxConnection.StartImport(ctx, uri, nil, &errorMode) +// if err != nil { +// log.Fatalf("Failed to start import: %v", err) +// } +// fmt.Printf("import starteed with ID: %s", importRes.Id) +// +// [storage integration]: https://docs.pinecone.io/guides/operations/integrations/manage-storage-integrations +func (idx *IndexConnection) StartImport(ctx context.Context, uri string, integrationId *string, errorMode *ImportErrorMode) (*StartImportResponse, error) { + if uri == "" { + return nil, fmt.Errorf("must specify a uri to start an import") + } + + req := db_data_rest.StartImportRequest{ + Uri: &uri, + IntegrationId: integrationId, + } + + if errorMode != nil { + req.ErrorMode = &db_data_rest.ImportErrorMode{ + OnError: pointerOrNil(db_data_rest.ImportErrorModeOnError(*errorMode)), + } + } + + res, err := (*idx.restClient).StartBulkImport(idx.akCtx(ctx), req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + return nil, handleErrorResponseBody(res, "failed to start import: ") + } + + return decodeStartImportResponse(res.Body) +} + +// DescribeImport retrieves information about a specific import operation. +// +// Returns an Import object representing the current state of the import or an error if the request fails. +// +// Parameters: +// - ctx: A context.Context object controls the request's lifetime, +// allowing for the request to be canceled or to timeout according to the context's deadline. +// - id: The id of the import operation. This is returned when you call [IndexConnection.StartImport], or can be retrieved +// through the [IndexConnection.ListImports] method. +// +// Example: +// +// ctx := context.Background() +// +// clientParams := pinecone.NewClientParams{ +// ApiKey: "YOUR_API_KEY", +// SourceTag: "your_source_identifier", // optional +// } +// +// pc, err := pinecone.NewClient(clientParams) +// if err != nil { +// log.Fatalf("Failed to create Client: %v", err) +// } +// +// idx, err := pc.DescribeIndex(ctx, "your-index-name") +// if err != nil { +// log.Fatalf("Failed to describe index \"%s\". Error:%s", idx.Name, err) +// } +// +// idxConnection, err := pc.Index(pinecone.NewIndexConnParams{Host: idx.Host}) +// if err != nil { +// log.Fatalf("Failed to create IndexConnection for Host: %v. Error: %v", idx.Host, err) +// } +// importDesc, err := idxConnection.DescribeImport(ctx, "your-import-id") +// if err != nil { +// log.Fatalf("Failed to describe import: %s - %v", "your-import-id", err) +// } +// fmt.Printf("Import ID: %s, Status: %s", importDesc.Id, importDesc.Status) +func (idx *IndexConnection) DescribeImport(ctx context.Context, id string) (*Import, error) { + res, err := (*idx.restClient).DescribeBulkImport(idx.akCtx(ctx), id) + if err != nil { + return nil, err + } + defer res.Body.Close() + + importModel, err := decodeImportModel(res.Body) + if err != nil { + return nil, err + } + return toImport(importModel), nil +} + +// ListImportsRequest holds the parameters for the ListImports method. +// +// Fields: +// - Limit: The maximum number of imports to return. +// - PaginationToken: The token to retrieve the next page of imports, if available. 
+type ListImportsRequest struct {
+	Limit           *int32
+	PaginationToken *string
+}
+
+// ListImportsResponse holds the result of listing bulk imports.
+//
+// Fields:
+//   - Imports: The list of Import objects returned.
+//   - NextPaginationToken: The token for paginating through results, if more imports are available.
+type ListImportsResponse struct {
+	Imports             []*Import `json:"imports,omitempty"`
+	NextPaginationToken *string   `json:"next_pagination_token,omitempty"`
+}
+
+// ListImports returns information about import operations. It returns operations in a
+// paginated form, with a pagination token to fetch the next page of results.
+//
+// Returns a pointer to a ListImportsResponse object or an error if the request fails.
+//
+// Parameters:
+//   - ctx: A context.Context object controls the request's lifetime,
+//     allowing for the request to be canceled or to timeout according to the context's deadline.
+//   - limit: The maximum number of imports to return per page. Pass nil to use the service default.
+//   - paginationToken: The token to retrieve the next page of imports. Pass nil to fetch the first page.
+//
+// Example:
+//
+//	ctx := context.Background()
+//
+//	clientParams := NewClientParams{
+//		ApiKey:    "YOUR_API_KEY",
+//		SourceTag: "your_source_identifier", // optional
+//	}
+//
+//	pc, err := NewClient(clientParams)
+//	if err != nil {
+//		log.Fatalf("Failed to create Client: %v", err)
+//	}
+//
+//	idx, err := pc.DescribeIndex(ctx, "your-index-name")
+//	if err != nil {
+//		log.Fatalf("Failed to describe index \"%s\". Error:%s", idx.Name, err)
+//	}
+//
+//	idxConnection, err := pc.Index(NewIndexConnParams{Host: idx.Host})
+//	if err != nil {
+//		log.Fatalf("Failed to create IndexConnection for Host: %v. Error: %v", idx.Host, err)
+//	}
+//
+//	limit := int32(10)
+//	firstImportPage, err := idxConnection.ListImports(ctx, &limit, nil)
+//	if err != nil {
+//		log.Fatalf("Failed to list imports: %v", err)
+//	}
+//	fmt.Printf("First page of imports: %+v", firstImportPage.Imports)
+//
+//	paginationToken := firstImportPage.NextPaginationToken
+//	nextImportPage, err := idxConnection.ListImports(ctx, &limit, paginationToken)
+//	if err != nil {
+//		log.Fatalf("Failed to list imports: %v", err)
+//	}
+//	fmt.Printf("Second page of imports: %+v", nextImportPage.Imports)
+func (idx *IndexConnection) ListImports(ctx context.Context, limit *int32, paginationToken *string) (*ListImportsResponse, error) {
+	params := db_data_rest.ListBulkImportsParams{
+		Limit:           limit,
+		PaginationToken: paginationToken,
+	}
+
+	res, err := (*idx.restClient).ListBulkImports(idx.akCtx(ctx), &params)
+	if err != nil {
+		return nil, err
+	}
+
+	listImportsResponse, err := decodeListImportsResponse(res.Body)
+	if err != nil {
+		return nil, err
+	}
+
+	return listImportsResponse, nil
+}
+
+// CancelImport cancels an import operation by id.
+//
+// Returns an error if the request fails.
+//
+// Parameters:
+//   - ctx: A context.Context object controls the request's lifetime,
+//     allowing for the request to be canceled or to timeout according to the context's deadline.
+//   - id: The id of the import operation to cancel.
+//
+// Example:
+//
+//	ctx := context.Background()
+//
+//	clientParams := NewClientParams{
+//		ApiKey:    "YOUR_API_KEY",
+//		SourceTag: "your_source_identifier", // optional
+//	}
+//
+//	pc, err := NewClient(clientParams)
+//	if err != nil {
+//		log.Fatalf("Failed to create Client: %v", err)
+//	}
+//
+//	idx, err := pc.DescribeIndex(ctx, "your-index-name")
+//	if err != nil {
+//		log.Fatalf("Failed to describe index \"%s\". 
Error:%s", idx.Name, err) +// } +// +// idxConnection, err := pc.Index(NewIndexConnParams{Host: idx.Host}) +// if err != nil { +// log.Fatalf("Failed to create IndexConnection for Host: %v. Error: %v", idx.Host, err) +// } +// +// err = idxConnection.CancelImport(ctx, "your-import-id") +// if err != nil { +// log.Fatalf("Failed to cancel import: %s", "your-import-id") +// } +func (idx *IndexConnection) CancelImport(ctx context.Context, id string) error { + res, err := (*idx.restClient).CancelBulkImport(idx.akCtx(ctx), id) + if err != nil { + return err + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + return handleErrorResponseBody(res, "failed to cancel import: ") + } + + return nil +} + +func decodeListImportsResponse(body io.ReadCloser) (*ListImportsResponse, error) { + var listImportsResponse *db_data_rest.ListImportsResponse + if err := json.NewDecoder(body).Decode(&listImportsResponse); err != nil { + return nil, err + } + + return toListImportsResponse(listImportsResponse), nil +} + +func decodeImportModel(body io.ReadCloser) (*db_data_rest.ImportModel, error) { + var importModel db_data_rest.ImportModel + if err := json.NewDecoder(body).Decode(&importModel); err != nil { + return nil, err + } + + return &importModel, nil +} + +func decodeStartImportResponse(body io.ReadCloser) (*StartImportResponse, error) { + var importResponse *db_data_rest.StartImportResponse + if err := json.NewDecoder(body).Decode(&importResponse); err != nil { + return nil, err + } + + return toImportResponse(importResponse), nil +} + +func (idx *IndexConnection) query(ctx context.Context, req *db_data_grpc.QueryRequest) (*QueryVectorsResponse, error) { + res, err := (*idx.grpcClient).Query(idx.akCtx(ctx), req) if err != nil { return nil, err } @@ -1014,8 +1322,8 @@ func (idx *IndexConnection) query(ctx context.Context, req *data.QueryRequest) ( }, nil } -func (idx *IndexConnection) delete(ctx context.Context, req *data.DeleteRequest) error { - _, err := (*idx.dataClient).Delete(idx.akCtx(ctx), req) +func (idx *IndexConnection) delete(ctx context.Context, req *db_data_grpc.DeleteRequest) error { + _, err := (*idx.grpcClient).Delete(idx.akCtx(ctx), req) return err } @@ -1029,7 +1337,7 @@ func (idx *IndexConnection) akCtx(ctx context.Context) context.Context { return metadata.AppendToOutgoingContext(ctx, newMetadata...) 
} -func toVector(vector *data.Vector) *Vector { +func toVector(vector *db_data_grpc.Vector) *Vector { if vector == nil { return nil } @@ -1041,11 +1349,11 @@ func toVector(vector *data.Vector) *Vector { } } -func toScoredVector(sv *data.ScoredVector) *ScoredVector { +func toScoredVector(sv *db_data_grpc.ScoredVector) *ScoredVector { if sv == nil { return nil } - v := toVector(&data.Vector{ + v := toVector(&db_data_grpc.Vector{ Id: sv.Id, Values: sv.Values, SparseValues: sv.SparseValues, @@ -1057,7 +1365,7 @@ func toScoredVector(sv *data.ScoredVector) *ScoredVector { } } -func toSparseValues(sv *data.SparseValues) *SparseValues { +func toSparseValues(sv *db_data_grpc.SparseValues) *SparseValues { if sv == nil { return nil } @@ -1067,7 +1375,7 @@ func toSparseValues(sv *data.SparseValues) *SparseValues { } } -func toUsage(u *data.Usage) *Usage { +func toUsage(u *db_data_grpc.Usage) *Usage { if u == nil { return nil } @@ -1076,18 +1384,66 @@ func toUsage(u *data.Usage) *Usage { } } -func toPaginationToken(p *data.Pagination) *string { +func toPaginationTokenGrpc(p *db_data_grpc.Pagination) *string { if p == nil { return nil } return &p.Next } -func vecToGrpc(v *Vector) *data.Vector { +func toPaginationTokenRest(p *db_data_rest.Pagination) *string { + if p == nil { + return nil + } + return p.Next +} + +func toImport(importModel *db_data_rest.ImportModel) *Import { + if importModel == nil { + return nil + } + + return &Import{ + Id: *importModel.Id, + Uri: *importModel.Uri, + Status: ImportStatus(*importModel.Status), + CreatedAt: importModel.CreatedAt, + FinishedAt: importModel.FinishedAt, + Error: importModel.Error, + } +} + +func toImportResponse(importResponse *db_data_rest.StartImportResponse) *StartImportResponse { + if importResponse == nil { + return nil + } + + return &StartImportResponse{ + Id: derefOrDefault(importResponse.Id, ""), + } +} + +func toListImportsResponse(listImportsResponse *db_data_rest.ListImportsResponse) *ListImportsResponse { + if listImportsResponse == nil { + return nil + } + + imports := make([]*Import, len(*listImportsResponse.Data)) + for i, importModel := range *listImportsResponse.Data { + imports[i] = toImport(&importModel) + } + + return &ListImportsResponse{ + Imports: imports, + NextPaginationToken: toPaginationTokenRest(listImportsResponse.Pagination), + } +} + +func vecToGrpc(v *Vector) *db_data_grpc.Vector { if v == nil { return nil } - return &data.Vector{ + return &db_data_grpc.Vector{ Id: v.Id, Values: v.Values, Metadata: v.Metadata, @@ -1095,11 +1451,11 @@ func vecToGrpc(v *Vector) *data.Vector { } } -func sparseValToGrpc(sv *SparseValues) *data.SparseValues { +func sparseValToGrpc(sv *SparseValues) *db_data_grpc.SparseValues { if sv == nil { return nil } - return &data.SparseValues{ + return &db_data_grpc.SparseValues{ Indices: sv.Indices, Values: sv.Values, } diff --git a/pinecone/index_connection_test.go b/pinecone/index_connection_test.go index adc98ff..8eab1ab 100644 --- a/pinecone/index_connection_test.go +++ b/pinecone/index_connection_test.go @@ -8,7 +8,7 @@ import ( "testing" "time" - "github.com/pinecone-io/go-pinecone/internal/gen/data" + db_data_grpc "github.com/pinecone-io/go-pinecone/internal/gen/db_data/grpc" "github.com/pinecone-io/go-pinecone/internal/utils" "google.golang.org/grpc" "google.golang.org/grpc/metadata" @@ -177,7 +177,7 @@ func (ts *IntegrationTests) TestMetadataAppliedToRequests() { require.True(ts.T(), ok, "Expected client to have an 'api-key' header") require.Equal(ts.T(), apiKey, apiKeyHeader, "Expected 'api-key' 
header to equal %s", apiKey) require.Equal(ts.T(), namespace, idxConn.Namespace, "Expected idxConn to have namespace '%s', but got '%s'", namespace, idxConn.Namespace) - require.NotNil(ts.T(), idxConn.dataClient, "Expected idxConn to have non-nil dataClient") + require.NotNil(ts.T(), idxConn.grpcClient, "Expected idxConn to have non-nil dataClient") require.NotNil(ts.T(), idxConn.grpcConn, "Expected idxConn to have non-nil grpcConn") // initiate request to trigger the MetadataInterceptor @@ -227,20 +227,22 @@ func (ts *IntegrationTests) TestUpdateVectorMetadata() { }) assert.NoError(ts.T(), err) - time.Sleep(5 * time.Second) + time.Sleep(10 * time.Second) vector, err := ts.idxConn.FetchVectors(ctx, []string{ts.vectorIds[0]}) if err != nil { ts.FailNow(fmt.Sprintf("Failed to fetch vector: %v", err)) } + assert.NotNil(ts.T(), vector.Vectors[ts.vectorIds[0]].Metadata, "Metadata is nil after update") + expectedGenre := expectedMetadataMap.Fields["genre"].GetStringValue() actualGenre := vector.Vectors[ts.vectorIds[0]].Metadata.Fields["genre"].GetStringValue() assert.Equal(ts.T(), expectedGenre, actualGenre, "Metadata does not match") } -func (ts *IntegrationTests) TestUpdateVectorSparseValues() error { +func (ts *IntegrationTests) TestUpdateVectorSparseValues() { ctx := context.Background() dims := int(ts.dimension) @@ -269,8 +271,44 @@ func (ts *IntegrationTests) TestUpdateVectorSparseValues() error { actualSparseValues := vector.Vectors[ts.vectorIds[0]].SparseValues.Values assert.ElementsMatch(ts.T(), expectedSparseValues.Values, actualSparseValues, "Sparse values do not match") +} + +func (ts *IntegrationTests) TestImportFlowHappyPath() { + if ts.indexType != "serverless" { + ts.T().Skip("Skipping import flow test for non-serverless index") + } - return nil + testImportUri := "s3://dev-bulk-import-datasets-pub/10-records-dim-10/" + ctx := context.Background() + + startRes, err := ts.idxConn.StartImport(ctx, testImportUri, nil, nil) + assert.NoError(ts.T(), err) + assert.NotNil(ts.T(), startRes) + + assert.NotNil(ts.T(), startRes.Id) + describeRes, err := ts.idxConn.DescribeImport(ctx, startRes.Id) + assert.NoError(ts.T(), err) + assert.NotNil(ts.T(), describeRes) + assert.Equal(ts.T(), startRes.Id, describeRes.Id) + + limit := int32(10) + listRes, err := ts.idxConn.ListImports(ctx, &limit, nil) + assert.NoError(ts.T(), err) + assert.NotNil(ts.T(), listRes) + + err = ts.idxConn.CancelImport(ctx, startRes.Id) + assert.NoError(ts.T(), err) +} + +func (ts *IntegrationTests) TestImportFlowNoUriError() { + if ts.indexType != "serverless" { + ts.T().Skip("Skipping import flow test for non-serverless index") + } + + ctx := context.Background() + _, err := ts.idxConn.StartImport(ctx, "", nil, nil) + assert.Error(ts.T(), err) + assert.Contains(ts.T(), err.Error(), "must specify a uri") } // Unit tests: @@ -299,7 +337,7 @@ func TestNewIndexConnection(t *testing.T) { require.True(t, ok, "Expected client to have an 'api-key' header") require.Equal(t, apiKey, apiKeyHeader, "Expected 'api-key' header to equal %s", apiKey) require.Empty(t, idxConn.Namespace, "Expected idxConn to have empty namespace, but got '%s'", idxConn.Namespace) - require.NotNil(t, idxConn.dataClient, "Expected idxConn to have non-nil dataClient") + require.NotNil(t, idxConn.grpcClient, "Expected idxConn to have non-nil dataClient") require.NotNil(t, idxConn.grpcConn, "Expected idxConn to have non-nil grpcConn") } @@ -320,7 +358,7 @@ func TestNewIndexConnectionNamespace(t *testing.T) { require.True(t, ok, "Expected client to have an 
'api-key' header") require.Equal(t, apiKey, apiKeyHeader, "Expected 'api-key' header to equal %s", apiKey) require.Equal(t, namespace, idxConn.Namespace, "Expected idxConn to have namespace '%s', but got '%s'", namespace, idxConn.Namespace) - require.NotNil(t, idxConn.dataClient, "Expected idxConn to have non-nil dataClient") + require.NotNil(t, idxConn.grpcClient, "Expected idxConn to have non-nil dataClient") require.NotNil(t, idxConn.grpcConn, "Expected idxConn to have non-nil grpcConn") } @@ -518,7 +556,7 @@ func TestMarshalDescribeIndexStatsResponseUnit(t *testing.T) { func TestToVectorUnit(t *testing.T) { tests := []struct { name string - vector *data.Vector + vector *db_data_grpc.Vector expected *Vector }{ { @@ -528,7 +566,7 @@ func TestToVectorUnit(t *testing.T) { }, { name: "Pass dense vector", - vector: &data.Vector{ + vector: &db_data_grpc.Vector{ Id: "dense-1", Values: []float32{0.01, 0.02, 0.03}, }, @@ -539,10 +577,10 @@ func TestToVectorUnit(t *testing.T) { }, { name: "Pass sparse vector", - vector: &data.Vector{ + vector: &db_data_grpc.Vector{ Id: "sparse-1", Values: nil, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -558,10 +596,10 @@ func TestToVectorUnit(t *testing.T) { }, { name: "Pass hybrid vector", - vector: &data.Vector{ + vector: &db_data_grpc.Vector{ Id: "hybrid-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -578,10 +616,10 @@ func TestToVectorUnit(t *testing.T) { }, { name: "Pass hybrid vector with metadata", - vector: &data.Vector{ + vector: &db_data_grpc.Vector{ Id: "hybrid-metadata-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -616,7 +654,7 @@ func TestToVectorUnit(t *testing.T) { func TestToSparseValuesUnit(t *testing.T) { tests := []struct { name string - sparseValues *data.SparseValues + sparseValues *db_data_grpc.SparseValues expected *SparseValues }{ { @@ -626,7 +664,7 @@ func TestToSparseValuesUnit(t *testing.T) { }, { name: "Pass sparse values", - sparseValues: &data.SparseValues{ + sparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -647,7 +685,7 @@ func TestToSparseValuesUnit(t *testing.T) { func TestToScoredVectorUnit(t *testing.T) { tests := []struct { name string - scoredVector *data.ScoredVector + scoredVector *db_data_grpc.ScoredVector expected *ScoredVector }{ { @@ -657,7 +695,7 @@ func TestToScoredVectorUnit(t *testing.T) { }, { name: "Pass scored dense vector", - scoredVector: &data.ScoredVector{ + scoredVector: &db_data_grpc.ScoredVector{ Id: "dense-1", Values: []float32{0.01, 0.01, 0.01}, Score: 0.1, @@ -672,9 +710,9 @@ func TestToScoredVectorUnit(t *testing.T) { }, { name: "Pass scored sparse vector", - scoredVector: &data.ScoredVector{ + scoredVector: &db_data_grpc.ScoredVector{ Id: "sparse-1", - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -693,10 +731,10 @@ func TestToScoredVectorUnit(t *testing.T) { }, { name: "Pass scored hybrid vector", - scoredVector: &data.ScoredVector{ + scoredVector: &db_data_grpc.ScoredVector{ Id: "hybrid-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: 
&db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -716,10 +754,10 @@ func TestToScoredVectorUnit(t *testing.T) { }, { name: "Pass scored hybrid vector with metadata", - scoredVector: &data.ScoredVector{ + scoredVector: &db_data_grpc.ScoredVector{ Id: "hybrid-metadata-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -760,7 +798,7 @@ func TestVecToGrpcUnit(t *testing.T) { tests := []struct { name string vector *Vector - expected *data.Vector + expected *db_data_grpc.Vector }{ { name: "Pass nil vector, expect nil to be returned", @@ -773,7 +811,7 @@ func TestVecToGrpcUnit(t *testing.T) { Id: "dense-1", Values: []float32{0.01, 0.02, 0.03}, }, - expected: &data.Vector{ + expected: &db_data_grpc.Vector{ Id: "dense-1", Values: []float32{0.01, 0.02, 0.03}, }, @@ -788,9 +826,9 @@ func TestVecToGrpcUnit(t *testing.T) { Values: []float32{0.01, 0.03}, }, }, - expected: &data.Vector{ + expected: &db_data_grpc.Vector{ Id: "sparse-1", - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -806,10 +844,10 @@ func TestVecToGrpcUnit(t *testing.T) { Values: []float32{0.01, 0.03}, }, }, - expected: &data.Vector{ + expected: &db_data_grpc.Vector{ Id: "hybrid-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -830,10 +868,10 @@ func TestVecToGrpcUnit(t *testing.T) { }, }, }, - expected: &data.Vector{ + expected: &db_data_grpc.Vector{ Id: "hybrid-metadata-1", Values: []float32{0.01, 0.02, 0.03}, - SparseValues: &data.SparseValues{ + SparseValues: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -859,7 +897,7 @@ func TestSparseValToGrpcUnit(t *testing.T) { name string sparseValues *SparseValues metadata *structpb.Struct - expected *data.SparseValues + expected *db_data_grpc.SparseValues }{ { name: "Pass nil sparse values, expect nil to be returned", @@ -872,7 +910,7 @@ func TestSparseValToGrpcUnit(t *testing.T) { Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, - expected: &data.SparseValues{ + expected: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -888,7 +926,7 @@ func TestSparseValToGrpcUnit(t *testing.T) { "genre": {Kind: &structpb.Value_StringValue{StringValue: "classical"}}, }, }, - expected: &data.SparseValues{ + expected: &db_data_grpc.SparseValues{ Indices: []uint32{0, 2}, Values: []float32{0.01, 0.03}, }, @@ -971,7 +1009,7 @@ func TestToUsageUnit(t *testing.T) { tests := []struct { name string - usage *data.Usage + usage *db_data_grpc.Usage expected *Usage }{ { @@ -981,7 +1019,7 @@ func TestToUsageUnit(t *testing.T) { }, { name: "Pass usage", - usage: &data.Usage{ + usage: &db_data_grpc.Usage{ ReadUnits: &u5, }, expected: &Usage{ @@ -1027,23 +1065,23 @@ func TestNormalizeHostUnit(t *testing.T) { } } -func TestToPaginationToken(t *testing.T) { +func TestToPaginationTokenGrpc(t *testing.T) { tokenForNilCase := "" tokenForPositiveCase := "next-token" tests := []struct { name string - token *data.Pagination + token *db_data_grpc.Pagination expected *string }{ { name: "Pass empty token, expect empty string to be returned", - token: &data.Pagination{}, + token: &db_data_grpc.Pagination{}, expected: &tokenForNilCase, }, { name: 
"Pass token", - token: &data.Pagination{ + token: &db_data_grpc.Pagination{ Next: "next-token", }, expected: &tokenForPositiveCase, @@ -1052,7 +1090,7 @@ func TestToPaginationToken(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - result := toPaginationToken(tt.token) + result := toPaginationTokenGrpc(tt.token) assert.Equal(t, tt.expected, result, "Expected result to be '%s', but got '%s'", tt.expected, result) }) } diff --git a/pinecone/models.go b/pinecone/models.go index 661adf5..a29cf9d 100644 --- a/pinecone/models.go +++ b/pinecone/models.go @@ -1,6 +1,8 @@ package pinecone import ( + "time" + "google.golang.org/protobuf/types/known/structpb" ) @@ -167,3 +169,55 @@ type MetadataFilter = structpb.Struct // // [attached to, or updated for, a vector]: https://docs.pinecone.io/guides/data/filter-with-metadata#inserting-metadata-into-an-index type Metadata = structpb.Struct + +// ImportStatus represents the status of an import operation. +// +// Values: +// - Cancelled: The import was canceled. +// - Completed: The import completed successfully. +// - Failed: The import encountered an error and did not complete successfully. +// - InProgress: The import is currently in progress. +// - Pending: The import is pending and has not yet started. +type ImportStatus string + +const ( + Cancelled ImportStatus = "Cancelled" + Completed ImportStatus = "Completed" + Failed ImportStatus = "Failed" + InProgress ImportStatus = "InProgress" + Pending ImportStatus = "Pending" +) + +// ImportErrorMode specifies how errors are handled during an import. +// +// Values: +// - Abort: The import process will abort upon encountering an error. +// - Continue: The import process will continue, skipping over records that produce errors. +type ImportErrorMode string + +const ( + Abort ImportErrorMode = "abort" + Continue ImportErrorMode = "continue" +) + +// Import represents the details and status of a bulk import process. +// +// Fields: +// - Id: The unique identifier of the import process. +// - PercentComplete: The percentage of the import process that has been completed. +// - RecordsImported: The total number of records successfully imported. +// - Status: The current status of the import (e.g., "InProgress", "Completed", "Failed"). +// - Uri: The URI of the source data for the import. +// - CreatedAt: The time at which the import process was initiated. +// - FinishedAt: The time at which the import process finished (either successfully or with an error). +// - Error: If the import failed, contains the error message associated with the failure. 
+type Import struct {
+	Id              string       `json:"id,omitempty"`
+	PercentComplete float32      `json:"percent_complete,omitempty"`
+	RecordsImported int64        `json:"records_imported,omitempty"`
+	Status          ImportStatus `json:"status,omitempty"`
+	Uri             string       `json:"uri,omitempty"`
+	CreatedAt       *time.Time   `json:"created_at,omitempty"`
+	FinishedAt      *time.Time   `json:"finished_at,omitempty"`
+	Error           *string      `json:"error,omitempty"`
+}
diff --git a/pinecone/test_suite.go b/pinecone/test_suite.go
index 6596cef..3d9fc0d 100644
--- a/pinecone/test_suite.go
+++ b/pinecone/test_suite.go
@@ -87,7 +87,20 @@ func (ts *IntegrationTests) TearDownSuite() {
 	_, err = WaitUntilIndexReady(ts, ctx)
 	require.NoError(ts.T(), err)
 	err = ts.client.DeleteIndex(ctx, ts.idxName)
-	require.NoError(ts.T(), err)
+
+	// If the index failed to delete, wait a bit and retry cleaning up.
+	// Sometimes indexes are stuck upgrading, or have pending collections.
+	retry := 4
+	for err != nil && retry > 0 {
+		time.Sleep(5 * time.Second)
+		fmt.Printf("Failed to delete index \"%s\". Retrying... (%d/4)\n", ts.idxName, 5-retry)
+		err = ts.client.DeleteIndex(ctx, ts.idxName)
+		retry--
+	}
+
+	if err != nil {
+		fmt.Printf("Failed to delete index \"%s\" after 4 retries: %v\n", ts.idxName, err)
+	}
 	fmt.Printf("\n %s setup suite torn down successfully\n", ts.indexType)
 }
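---

For reviewers who want a quick end-to-end reference alongside the doc comments, below is a minimal sketch of the import flow added in this PR (start, poll, list, cancel). It is illustrative only: the API key, index name, bucket URI, poll interval, and attempt count are placeholder assumptions, and it assumes a serverless index since the integration test only exercises imports there. The calls themselves (`StartImport`, `DescribeImport`, `ListImports`, `CancelImport`, and the `ImportErrorMode`/`ImportStatus` constants) are the ones introduced above.

```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/pinecone-io/go-pinecone/pinecone"
)

func main() {
	ctx := context.Background()

	pc, err := pinecone.NewClient(pinecone.NewClientParams{ApiKey: "YOUR_API_KEY"})
	if err != nil {
		log.Fatalf("Failed to create Client: %v", err)
	}

	// Resolve the index host and open a connection; import operations go over the new REST data plane client.
	idx, err := pc.DescribeIndex(ctx, "your-serverless-index")
	if err != nil {
		log.Fatalf("Failed to describe index: %v", err)
	}
	idxConn, err := pc.Index(pinecone.NewIndexConnParams{Host: idx.Host})
	if err != nil {
		log.Fatalf("Failed to create IndexConnection: %v", err)
	}

	// Start an import from a publicly readable bucket; pass a storage integration id
	// instead of nil if the bucket requires authentication.
	errorMode := pinecone.Continue
	startRes, err := idxConn.StartImport(ctx, "s3://your-bucket/your-prefix/", nil, &errorMode)
	if err != nil {
		log.Fatalf("Failed to start import: %v", err)
	}
	fmt.Printf("Import started with ID: %s\n", startRes.Id)

	// Poll until the import reaches a terminal state, or give up after a few attempts.
	for attempt := 0; attempt < 10; attempt++ {
		desc, err := idxConn.DescribeImport(ctx, startRes.Id)
		if err != nil {
			log.Fatalf("Failed to describe import: %v", err)
		}
		fmt.Printf("Import %s: %s (%.1f%% complete)\n", desc.Id, desc.Status, desc.PercentComplete)
		if desc.Status == pinecone.Completed || desc.Status == pinecone.Failed || desc.Status == pinecone.Cancelled {
			break
		}
		time.Sleep(10 * time.Second)
	}

	// List imports for the index, one page at a time.
	limit := int32(10)
	page, err := idxConn.ListImports(ctx, &limit, nil)
	if err != nil {
		log.Fatalf("Failed to list imports: %v", err)
	}
	for _, imp := range page.Imports {
		fmt.Printf("Listed import %s: %s\n", imp.Id, imp.Status)
	}

	// Cancel the import if it is still pending or in progress.
	if err := idxConn.CancelImport(ctx, startRes.Id); err != nil {
		log.Fatalf("Failed to cancel import: %v", err)
	}
}
```

Passing `pinecone.Continue` mirrors the server-side default described in the `StartImport` doc comment; use `pinecone.Abort` to stop the import on the first failing record.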