Cleanup provider constructors #18

Merged (1 commit, Apr 14, 2024)
ai-providers/mistral.ts (7 changes: 6 additions & 1 deletion)
@@ -52,12 +52,17 @@ class MistralByteSource implements UnderlyingByteSource {
   }
 }
 
+interface MistralProviderCtorOptions {
+  model: string
+  apiKey: string
+}
+
 export class MistralProvider implements AiProvider {
   model: string
   apiKey: string
   client?: import('@mistralai/mistralai').default = undefined
 
-  constructor (model: string, apiKey: string) {
+  constructor ({ model, apiKey }: MistralProviderCtorOptions) {
     this.model = model
     this.apiKey = apiKey
   }
ai-providers/ollama.ts (7 changes: 6 additions & 1 deletion)
@@ -37,11 +37,16 @@ class OllamaByteSource implements UnderlyingByteSource {
   }
 }
 
+interface OllamaProviderCtorOptions {
+  host: string
+  model: string
+}
+
 export class OllamaProvider implements AiProvider {
   model: string
   client: Ollama
 
-  constructor (host: string, model: string) {
+  constructor ({ host, model }: OllamaProviderCtorOptions) {
     this.model = model
     this.client = new Ollama({ host })
   }
ai-providers/open-ai.ts (7 changes: 6 additions & 1 deletion)
@@ -81,11 +81,16 @@ class OpenAiByteSource implements UnderlyingByteSource {
   }
 }
 
+interface OpenAiProviderCtorOptions {
+  model: string
+  apiKey: string
+}
+
 export class OpenAiProvider implements AiProvider {
   model: string
   client: OpenAI
 
-  constructor (model: string, apiKey: string) {
+  constructor ({ model, apiKey }: OpenAiProviderCtorOptions) {
     this.model = model
     // @ts-expect-error
     this.client = new OpenAI({ apiKey, fetch })
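
Taken together, all three providers now accept a single typed options object instead of positional arguments. A brief usage sketch of the new call sites (the model names mirror the test fixtures further down; the import paths, the placeholder API key, and the Ollama host are assumptions for illustration only):

import { OpenAiProvider } from './ai-providers/open-ai'
import { MistralProvider } from './ai-providers/mistral'
import { OllamaProvider } from './ai-providers/ollama'

// Named fields make the call sites self-describing and order-independent.
const openAi = new OpenAiProvider({ model: 'gpt-3.5-turbo', apiKey: 'YOUR_API_KEY' })
const mistral = new MistralProvider({ model: 'open-mistral-7b', apiKey: 'YOUR_API_KEY' })
const ollama = new OllamaProvider({ host: 'http://127.0.0.1:11434', model: 'some-model' })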
plugins/warp.ts (9 changes: 3 additions & 6 deletions)
@@ -12,14 +12,11 @@ const UnknownAiProviderError = createError('UNKNOWN_AI_PROVIDER', 'Unknown AI Pr
 
 function build (aiProvider: AiWarpConfig['aiProvider']): AiProvider {
   if ('openai' in aiProvider) {
-    const { model, apiKey } = aiProvider.openai
-    return new OpenAiProvider(model, apiKey)
+    return new OpenAiProvider(aiProvider.openai)
   } else if ('mistral' in aiProvider) {
-    const { model, apiKey } = aiProvider.mistral
-    return new MistralProvider(model, apiKey)
+    return new MistralProvider(aiProvider.mistral)
   } else if ('ollama' in aiProvider) {
-    const { host, model } = aiProvider.ollama
-    return new OllamaProvider(host, model)
+    return new OllamaProvider(aiProvider.ollama)
   } else {
     throw new UnknownAiProviderError()
   }
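
The build() simplification works because each provider entry in the config is now structurally identical to that provider's constructor options, so it can be forwarded untouched. A minimal sketch of that relationship, assuming the AiWarpConfig provider entries carry exactly the fields shown in the interfaces above (the union below is illustrative, not the real generated config type):

// Illustrative shapes only; the real AiWarpConfig type lives elsewhere in the repo.
type OpenAiEntry = { model: string, apiKey: string }   // matches OpenAiProviderCtorOptions
type MistralEntry = { model: string, apiKey: string }  // matches MistralProviderCtorOptions
type OllamaEntry = { host: string, model: string }     // matches OllamaProviderCtorOptions

type AiProviderEntry =
  | { openai: OpenAiEntry }
  | { mistral: MistralEntry }
  | { ollama: OllamaEntry }

// With matching shapes, the destructure-and-repack step disappears:
//   return new OpenAiProvider(aiProvider.openai)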
tests/unit/ai-providers.test.ts (6 changes: 3 additions & 3 deletions)
@@ -10,9 +10,9 @@ import { OllamaProvider } from '../../ai-providers/ollama'
 const expectedStreamBody = buildExpectedStreamBodyString()
 
 const providers: AiProvider[] = [
-  new OpenAiProvider('gpt-3.5-turbo', ''),
-  new MistralProvider('open-mistral-7b', ''),
-  new OllamaProvider(OLLAMA_MOCK_HOST, 'some-model')
+  new OpenAiProvider({ model: 'gpt-3.5-turbo', apiKey: '' }),
+  new MistralProvider({ model: 'open-mistral-7b', apiKey: '' }),
+  new OllamaProvider({ host: OLLAMA_MOCK_HOST, model: 'some-model' })
 ]
 
 for (const provider of providers) {