Skip to content
This repository was archived by the owner on Mar 7, 2025. It is now read-only.

Commit

Permalink
chore: update models in texts and tests
Browse files — browse the repository at this point in the history
  • Loading branch information
Tomas2D committed Nov 10, 2023
1 parent 40871a8 commit f32c2be
Show file tree
Hide file tree
Showing 6 changed files with 15 additions and 15 deletions.
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ The following example showcases how you can integrate GenAI into your project.
import { GenAIModel } from '@ibm-generative-ai/node-sdk/langchain';

const model = new GenAIModel({
modelId: 'google/ul2',
modelId: 'google/flan-ul2',
parameters: {},
configuration: {
apiKey: 'pak-.....',
Expand Down Expand Up @@ -222,7 +222,7 @@ console.log(text); // ArcticAegis
import { GenAIModel } from '@ibm-generative-ai/node-sdk/langchain';

const model = new GenAIModel({
modelId: 'google/ul2',
modelId: 'google/flan-ul2',
stream: true,
parameters: {},
configuration: {
Expand All @@ -245,7 +245,7 @@ await model.call('Tell me a joke.', undefined, [
import { GenAIChatModel } from '@ibm-generative-ai/node-sdk/langchain';

const client = new GenAIChatModel({
modelId: 'togethercomputer/gpt-neoxt-chat-base-20b',
modelId: 'eleutherai/gpt-neox-20b',
stream: false,
configuration: {
endpoint: process.env.ENDPOINT,
Expand Down
4 changes: 2 additions & 2 deletions examples/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ const client = new Client({
apiKey: process.env.GENAI_API_KEY,
});

const model_id = 'google/ul2';
const model_id = 'google/flan-ul2';

{
// Start a conversation
Expand Down Expand Up @@ -69,7 +69,7 @@ const model_id = 'google/ul2';
// Streaming callbacks
client.chat(
{
model_id: 'google/ul2',
model_id: 'google/flan-ul2',
messages: [{ role: 'user', content: 'How are you?' }],
},
{ stream: true },
Expand Down
2 changes: 1 addition & 1 deletion examples/langchain/llm-chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import { GenAIChatModel } from '../../src/langchain/llm-chat.js';

const makeClient = (stream?: boolean) =>
new GenAIChatModel({
modelId: 'togethercomputer/gpt-neoxt-chat-base-20b',
modelId: 'eleutherai/gpt-neox-20b',
stream,
configuration: {
endpoint: process.env.ENDPOINT,
Expand Down
12 changes: 6 additions & 6 deletions src/tests/e2e/client.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ describe('client', () => {
const makeValidStream = (parameters: Record<string, any> = {}) =>
client.generate(
{
model_id: 'google/ul2',
model_id: 'google/flan-ul2',
input: 'Hello, World',
parameters: {
max_new_tokens: 10,
Expand Down Expand Up @@ -89,11 +89,11 @@ describe('client', () => {
client.generate(
[
{
model_id: 'google/ul2',
model_id: 'google/flan-ul2',
input: 'Hello, World',
},
{
model_id: 'google/ul2',
model_id: 'google/flan-ul2',
input: 'Hello, World',
},
] as unknown as GenerateInput,
Expand Down Expand Up @@ -153,7 +153,7 @@ describe('client', () => {
const chunks: GenerateOutput[] = [];
client.generate(
{
model_id: 'google/ul2',
model_id: 'google/flan-ul2',
input: 'Hello, World',
parameters: {},
},
Expand Down Expand Up @@ -215,7 +215,7 @@ describe('client', () => {
const makeValidStream = () =>
client.chat(
{
model_id: 'google/ul2',
model_id: 'google/flan-ul2',
messages: [{ role: 'user', content: 'Hello World!' }],
},
{
Expand Down Expand Up @@ -246,7 +246,7 @@ describe('client', () => {
const chunks: ChatOutput[] = [];
client.chat(
{
model_id: 'google/ul2',
model_id: 'google/flan-ul2',
messages: [{ role: 'user', content: 'Hello World!' }],
},
{
Expand Down
2 changes: 1 addition & 1 deletion src/tests/e2e/langchain/llm-chat.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ describeIf(process.env.RUN_LANGCHAIN_CHAT_TESTS === 'true')(
() => {
const makeClient = (stream?: boolean) =>
new GenAIChatModel({
modelId: 'togethercomputer/gpt-neoxt-chat-base-20b',
modelId: 'eleutherai/gpt-neox-20b',
stream,
configuration: {
endpoint: process.env.ENDPOINT,
Expand Down
4 changes: 2 additions & 2 deletions src/tests/integration/client.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ describe('client', () => {

test('should replace the config', async () => {
const input = {
model_id: 'google/ul2',
model_id: 'google/flan-ul2',
parameters: {
decoding_method: 'greedy',
random_seed: 8,
Expand All @@ -98,7 +98,7 @@ describe('client', () => {

test('should set and reset the config', async () => {
const input = {
model_id: 'google/ul2',
model_id: 'google/flan-ul2',
parameters: {
decoding_method: 'greedy',
random_seed: 8,
Expand Down

0 comments on commit f32c2be

Please sign in to comment.