Skip to content

Commit

Permalink
Remove "bulk" language from README wrt Import operations (#305)
Browse files Browse the repository at this point in the history
  • Loading branch information
aulorbe committed Oct 23, 2024
1 parent 4e655d3 commit f27db40
Show file tree
Hide file tree
Showing 8 changed files with 21 additions and 28 deletions.
10 changes: 5 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -693,17 +693,17 @@ const vectors = [
await index.upsert(vectors);
```

### Import vectors from object storage

You can now [import vectors en masse](https://docs.pinecone.io/guides/data/understanding-imports) from object
storage. `Import` is a long-running, asynchronous operation that imports large numbers of records into a Pinecone
serverless index.

In order to import vectors from object storage, they must be stored in Parquet files and adhere to the necessary
[file format](https://docs.pinecone.io/guides/data/understanding-imports#parquet-file-format). Your object storage
must also adhere to the necessary [directory structure](https://docs.pinecone.io/guides/data/understanding-imports#directory-structure).

The following example imports vectors from an Amazon S3 bucket into a Pinecone serverless index:

```typescript
import { Pinecone } from '@pinecone-database/pinecone';
Expand Down Expand Up @@ -737,8 +737,8 @@ You can [start, cancel, and check the status](https://docs.pinecone.io/guides/da

**Notes:**

- `Import` only works with Serverless indexes
- `Import` is in [public preview](https://docs.pinecone.io/release-notes/feature-availability)
- The only object storage provider currently supported is [Amazon S3](https://docs.pinecone.io/guides/operations/integrations/integrate-with-amazon-s3)
- Vectors will take _at least 10 minutes_ to appear in your index upon completion of the import operation, since
this operation is optimized for very large workloads
Expand Down
5 changes: 0 additions & 5 deletions src/data/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ import { ListImportsCommand } from './bulk/listImports';
import { DescribeImportCommand } from './bulk/describeImport';
import { CancelImportCommand } from './bulk/cancelImport';
import { BulkOperationsProvider } from './bulk/bulkOperationsProvider';
import { prerelease } from '../utils/prerelease';

export type {
PineconeConfiguration,
Expand Down Expand Up @@ -564,7 +563,6 @@ export class Index<T extends RecordMetadata = RecordMetadata> {
* even if some records fail to import. To inspect failures in "Continue" mode, send a request to {@link listImports}. Pass
* "Abort" to stop the import operation if any records fail to import.
*/
@prerelease('2024-10')
async startImport(uri: string, errorMode?: string, integration?: string) {
  // Delegate to the start-import command, which wraps the generated bulk API client.
  const response = await this._startImportCommand.run(uri, errorMode, integration);
  return response;
}
Expand Down Expand Up @@ -599,7 +597,6 @@ export class Index<T extends RecordMetadata = RecordMetadata> {
* @param limit - (Optional) Max number of import operations to return per page.
* @param paginationToken - (Optional) Pagination token to continue a previous listing operation.
*/
@prerelease('2024-10')
async listImports(limit?: number, paginationToken?: string) {
  // Delegate to the list-imports command; pagination is handled by the caller via the token.
  const response = await this._listImportsCommand.run(limit, paginationToken);
  return response;
}
Expand Down Expand Up @@ -628,7 +625,6 @@ export class Index<T extends RecordMetadata = RecordMetadata> {
*
* @param id - The id of the import operation to describe.
*/
@prerelease('2024-10')
async describeImport(id: string) {
  // Delegate to the describe-import command for the given operation id.
  const description = await this._describeImportCommand.run(id);
  return description;
}
Expand All @@ -648,7 +644,6 @@ export class Index<T extends RecordMetadata = RecordMetadata> {
*
* @param id - The id of the import operation to cancel.
*/
@prerelease('2024-10')
async cancelImport(id: string) {
  // Delegate to the cancel-import command for the given operation id.
  const result = await this._cancelImportCommand.run(id);
  return result;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ export interface ConfigureIndexRequest {
* @type {{ [key: string]: string; }}
* @memberof ConfigureIndexRequest
*/
tags?: { [key: string]: string; };
tags?: { [key: string]: string; } | null;
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ export interface CreateIndexRequest {
* @type {{ [key: string]: string; }}
* @memberof CreateIndexRequest
*/
tags?: { [key: string]: string; };
tags?: { [key: string]: string; } | null;
/**
*
* @type {IndexSpec}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ export interface IndexModel {
* @type {{ [key: string]: string; }}
* @memberof IndexModel
*/
tags?: { [key: string]: string; };
tags?: { [key: string]: string; } | null;
/**
*
* @type {IndexModelSpec}
Expand Down
15 changes: 6 additions & 9 deletions src/pinecone-generated-ts-fetch/db_control/models/PodSpec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,13 +37,13 @@ export interface PodSpec {
* @type {number}
* @memberof PodSpec
*/
replicas: number;
replicas?: number;
/**
* The number of shards. Shards split your data across multiple pods so you can fit more data into an index.
* @type {number}
* @memberof PodSpec
*/
shards: number;
shards?: number;
/**
* The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`.
* @type {string}
Expand All @@ -55,7 +55,7 @@ export interface PodSpec {
* @type {number}
* @memberof PodSpec
*/
pods: number;
pods?: number;
/**
*
* @type {PodSpecMetadataConfig}
Expand All @@ -76,10 +76,7 @@ export interface PodSpec {
/**
 * Runtime type guard: returns true when `value` carries every property
 * this check requires for a PodSpec payload.
 */
export function instanceOfPodSpec(value: object): boolean {
    const requiredKeys = ["environment", "replicas", "shards", "podType", "pods"];
    return requiredKeys.every((key) => key in value);
}
Expand All @@ -95,10 +92,10 @@ export function PodSpecFromJSONTyped(json: any, ignoreDiscriminator: boolean): P
return {

'environment': json['environment'],
'replicas': json['replicas'],
'shards': json['shards'],
'replicas': !exists(json, 'replicas') ? undefined : json['replicas'],
'shards': !exists(json, 'shards') ? undefined : json['shards'],
'podType': json['pod_type'],
'pods': json['pods'],
'pods': !exists(json, 'pods') ? undefined : json['pods'],
'metadataConfig': !exists(json, 'metadata_config') ? undefined : PodSpecMetadataConfigFromJSON(json['metadata_config']),
'sourceCollection': !exists(json, 'source_collection') ? undefined : json['source_collection'],
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ export interface StartImportRequest {
* @type {string}
* @memberof StartImportRequest
*/
uri?: string;
uri: string;
/**
*
* @type {ImportErrorMode}
Expand All @@ -51,6 +51,7 @@ export interface StartImportRequest {
*/
/**
 * Runtime type guard: a StartImportRequest payload must at minimum
 * include the `uri` property.
 */
export function instanceOfStartImportRequest(value: object): boolean {
    return "uri" in value;
}
Expand All @@ -66,7 +67,7 @@ export function StartImportRequestFromJSONTyped(json: any, ignoreDiscriminator:
return {

'integrationId': !exists(json, 'integrationId') ? undefined : json['integrationId'],
'uri': !exists(json, 'uri') ? undefined : json['uri'],
'uri': json['uri'],
'errorMode': !exists(json, 'errorMode') ? undefined : ImportErrorModeFromJSON(json['errorMode']),
};
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -83,8 +83,8 @@ export class InferenceApi extends runtime.BaseAPI {
}

/**
 * Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank documents](https://docs.pinecone.io/guides/inference/rerank).
 * Rerank documents
 */
async rerankRaw(requestParameters: RerankOperationRequest, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<runtime.ApiResponse<RerankResult>> {
const queryParameters: any = {};
Expand All @@ -109,8 +109,8 @@ export class InferenceApi extends runtime.BaseAPI {
}

/**
 * Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank documents](https://docs.pinecone.io/guides/inference/rerank).
 * Rerank documents
 */
async rerank(requestParameters: RerankOperationRequest = {}, initOverrides?: RequestInit | runtime.InitOverrideFunction): Promise<RerankResult> {
const response = await this.rerankRaw(requestParameters, initOverrides);
Expand Down

0 comments on commit f27db40

Please sign in to comment.