Skip to content

Commit

Permalink
chore(v2): update examples (#83)
Browse files Browse the repository at this point in the history
Signed-off-by: Tomas Pilar <[email protected]>
  • Loading branch information
pilartomas authored Feb 23, 2024
1 parent ebff49c commit 9d32dc9
Show file tree
Hide file tree
Showing 13 changed files with 155 additions and 203 deletions.
4 changes: 0 additions & 4 deletions examples/assets/generate_input.jsonl

This file was deleted.

59 changes: 13 additions & 46 deletions examples/chat.ts
Original file line number Diff line number Diff line change
@@ -1,18 +1,15 @@
import { Client } from '../src/index.js';

import { CHAT_MODEL } from './constants.js';

const client = new Client({
apiKey: process.env.GENAI_API_KEY,
});

const model_id = 'google/flan-ul2';

{
// Start a conversation
const {
conversation_id,
result: { generated_text: answer1 },
} = await client.chat({
model_id,
const { conversation_id, results: results1 } = await client.text.chat.create({
model_id: CHAT_MODEL,
messages: [
{
role: 'system',
Expand All @@ -24,59 +21,29 @@ const model_id = 'google/flan-ul2';
},
],
});
console.log(answer1);
console.log(results1[0]);

// Continue the conversation
const {
result: { generated_text: answer2 },
} = await client.chat({
const { results: results2 } = await client.text.chat.create({
conversation_id,
model_id,
model_id: CHAT_MODEL,
messages: [
{
role: 'user',
content: 'Are you sure?',
},
],
});
console.log(answer2);
console.log(results2[0]);
}

{
// Chat interface has the same promise, streaming and callback variants as generate interface

// Promise
const data = await client.chat({
model_id,
// Stream
const stream = await client.text.chat.create_stream({
model_id: CHAT_MODEL,
messages: [{ role: 'user', content: 'How are you?' }],
});
console.log(data.result.generated_text);
// Callback
client.chat(
{ model_id, messages: [{ role: 'user', content: 'How are you?' }] },
(err, data) => {
if (err) console.error(err);
else console.log(data.result.generated_text);
},
);
// Stream
for await (const chunk of client.chat(
{ model_id, messages: [{ role: 'user', content: 'How are you?' }] },
{ stream: true },
)) {
console.log(chunk.result.generated_text);
for await (const chunk of stream) {
console.log(chunk.results?.at(0)?.generated_text);
}
// Streaming callbacks
client.chat(
{
model_id: 'google/flan-ul2',
messages: [{ role: 'user', content: 'How are you?' }],
},
{ stream: true },
(err, data) => {
if (err) console.error(err);
else if (data) console.log(data.result.generated_text);
else console.log('EOS');
},
);
}
2 changes: 2 additions & 0 deletions examples/constants.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
export const MODEL = 'google/flan-ul2';
export const CHAT_MODEL = 'meta-llama/llama-2-70b-chat';
45 changes: 25 additions & 20 deletions examples/file.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { createReadStream, createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';
import { createReadStream } from 'node:fs';
import { blob } from 'node:stream/consumers';

import { Client } from '../src/index.js';

Expand All @@ -9,36 +9,41 @@ const client = new Client({

{
// List all files
for await (const file of client.files()) {
console.log(file);
let totalCount = Infinity;
const limit = 100;
for (let offset = 0; offset < totalCount; offset += limit) {
const { results, total_count } = await client.file.list({
limit,
offset,
});
for (const file of results) {
console.log(file);
}
totalCount = total_count;
}
}

{
// List all files via callback interface
client.files((err, file) => {
if (err) console.error(err);
console.log(file);
});
}

{
// Upload a file
const newFile = await client.file({
const { result } = await client.file.create({
purpose: 'tune',
filename: 'tune_input.jsonl',
file: createReadStream('examples/assets/tune_input.jsonl'),
file: {
name: 'tune_input.jsonl',
content: (await blob(
createReadStream('examples/assets/tune_input.jsonl'),
)) as any,
},
});
console.log(newFile);
console.log(result);

// Show details of a file
const file = await client.file({ id: newFile.id });
const file = await client.file.retrieve({ id: result.id });
console.log(file);

// Download the file's content
const content = await file.download();
await pipeline(content, createWriteStream('/dev/null'));
const content = await client.file.read({ id: result.id });
console.log(await content.text());

// Delete the file
await client.file({ id: file.id }, { delete: true });
await client.file.delete({ id: result.id });
}
96 changes: 14 additions & 82 deletions examples/generate.ts
Original file line number Diff line number Diff line change
@@ -1,96 +1,28 @@
import { Client } from '../src/index.js';

import { loadGenerateInput } from './load_input.js';
import { MODEL } from './constants.js';

const client = new Client({
apiKey: process.env.GENAI_API_KEY,
});

const multipleInputs = loadGenerateInput();
const singleInput = multipleInputs[0];
const input = { model_id: MODEL, input: 'How are you?' };

// {
// // Use with a single input to get a promise
// const output = await client.generate(singleInput);
// console.log(output);
// }

// {
// // Or supply a callback
// client.generate(singleInput, (err, output) => {
// if (err) console.error(err);
// else console.log(output);
// });
// }

// {
// // Use with multiple inputs to get a promise
// const outputs = await Promise.all(client.generate(multipleInputs));
// console.log(outputs);

// // Or supply a callback which will be called for each output
// // Callback is guaranteed to be called in the order of respective inputs
// client.generate(multipleInputs, (err, output) => {
// if (err) console.error(err);
// else console.log(output);
// });

// // The method is optimized for sequential await, order the inputs accordingly
// for (const outputPromise of client.generate(multipleInputs)) {
// try {
// console.log(await outputPromise);
// } catch (err) {
// console.error(err);
// }
// }
// }

// {
// // Streaming (callback style)
// client.generate(
// singleInput,
// {
// stream: true,
// },
// (err, output) => {
// if (err) {
// console.error(err);
// } else if (output === null) {
// // END of stream
// } else {
// console.log(output.stop_reason);
// console.log(output.generated_token_count);
// console.log(output.input_token_count);
// console.log(output.generated_text);
// }
// },
// );
// }
{
const output = await client.text.generation.create(input);
console.log(output);
}

{
// Streaming (async iterators)
const stream = client.generation_stream(singleInput);
const stream = await client.text.generation.create_stream(input);
for await (const chunk of stream) {
console.log(chunk.stop_reason);
console.log(chunk.generated_token_count);
console.log(chunk.input_token_count);
console.log(chunk.generated_text);
const result = chunk.results?.at(0);
if (result) {
console.log(result.stop_reason);
console.log(result.generated_token_count);
console.log(result.input_token_count);
console.log(result.generated_text);
}
}
}

{
// Streaming (built-in stream methods)
const stream = client.generation_stream(singleInput);
stream.on('data', (chunk) => {
console.log(chunk.stop_reason);
console.log(chunk.generated_token_count);
console.log(chunk.input_token_count);
console.log(chunk.generated_text);
});
stream.on('error', (err) => {
console.error('error has occurred', err);
});
stream.on('close', () => {
console.info('end of stream');
});
}
25 changes: 17 additions & 8 deletions examples/history.ts
Original file line number Diff line number Diff line change
@@ -1,23 +1,32 @@
import { Client } from '../src/index.js';

import { CHAT_MODEL } from './constants.js';

const client = new Client({
apiKey: process.env.GENAI_API_KEY,
});

{
// List historical success requests to the API
for await (const request of client.history({
origin: 'API',
status: 'SUCCESS',
})) {
const { results } = await client.request.list({
origin: 'api',
status: 'success',
});
for (const request of results) {
console.log(request);
}
}

{
// List all requests from the past via callback interface
client.history((err, request) => {
if (err) console.error(err);
console.log(request);
// List all requests related to a chat conversation
const { conversation_id } = await client.text.chat.create({
model_id: CHAT_MODEL,
messages: [{ role: 'user', content: 'How are you?' }],
});
const { results } = await client.request.chat({
conversationId: conversation_id,
});
for (const request of results) {
console.log(request);
}
}
11 changes: 0 additions & 11 deletions examples/load_input.ts

This file was deleted.

15 changes: 13 additions & 2 deletions examples/models.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,19 @@
import { Client } from '../src/index.js';

import { MODEL } from './constants.js';

const client = new Client({
apiKey: process.env.GENAI_API_KEY,
});

const models = await client.models({ limit: 100, offset: 0 });
console.log(models);
{
// List first hundred models
const { results } = await client.model.list({ limit: 100, offset: 0 });
console.log(results);
}

{
// Retrieve info about a specific model
const { result } = await client.model.retrieve({ id: MODEL });
console.log(result);
}
Loading

0 comments on commit 9d32dc9

Please sign in to comment.