Skip to content

Commit

Permalink
Don't reuse LlamaChatSession
Browse files Browse the repository at this point in the history
  • Loading branch information
flakey5 committed May 3, 2024
1 parent 89cedaf commit 8602eb5
Showing 1 changed file with 7 additions and 16 deletions.
23 changes: 7 additions & 16 deletions ai-providers/llama2.ts
Original file line number Diff line number Diff line change
Expand Up @@ -142,32 +142,23 @@ interface Llama2ProviderCtorOptions {
}

/**
 * AiProvider backed by a local Llama 2 model (node-llama-cpp).
 *
 * The model and its LlamaContext are created once in the constructor and
 * shared across calls; a fresh LlamaChatSession is created for every
 * request so that prompts from separate calls do not share chat history
 * (see commit: "Don't reuse LlamaChatSession").
 */
export class Llama2Provider implements AiProvider {
  // Shared inference context; sessions are cheap, the context is not.
  context: LlamaContext

  constructor ({ modelPath }: Llama2ProviderCtorOptions) {
    // Loading the model is the expensive step — do it once up front.
    const model = new LlamaModel({ modelPath })
    this.context = new LlamaContext({ model })
  }

  /**
   * Send a single prompt and resolve with the full completion text.
   *
   * @param prompt - user prompt to send to the model
   * @returns the model's complete response
   */
  async ask (prompt: string): Promise<string> {
    // New session per call: no chat history leaks between requests.
    const session = new LlamaChatSession({ context: this.context })
    const response = await session.prompt(prompt)

    return response
  }

  /**
   * Send a prompt and stream the completion back as bytes.
   *
   * @param prompt - user prompt to send to the model
   * @param chunkCallback - optional hook invoked per generated chunk
   * @returns a ReadableStream fed by a Llama2ByteSource over a fresh session
   */
  async askStream (prompt: string, chunkCallback?: StreamChunkCallback): Promise<ReadableStream> {
    // New session per call, same rationale as ask().
    const session = new LlamaChatSession({ context: this.context })

    return new ReadableStream(new Llama2ByteSource(session, prompt, chunkCallback))
  }
}

0 comments on commit 8602eb5

Please sign in to comment.