Skip to content

Commit

Permalink
fix fim for lmstudio
Browse files Browse the repository at this point in the history
  • Loading branch information
rjmacarthy committed Apr 4, 2024
1 parent bacab06 commit 199ac65
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 5 deletions.
15 changes: 11 additions & 4 deletions src/extension/provider-options.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ export function createStreamRequestBodyFim(
prompt: string,
options: {
temperature: number
numPredictChat: number
numPredictFim: number
model: string
keepAlive?: string | number
}
Expand All @@ -71,23 +71,30 @@ export function createStreamRequestBodyFim(
keep_alive: options.keepAlive,
options: {
temperature: options.temperature,
num_predict: options.numPredictChat
num_predict: options.numPredictFim
}
}
case ApiProviders.LMStudio:
return {
prompt,
stream: true,
temperature: options.temperature,
n_predict: options.numPredictFim
}
case ApiProviders.LlamaCpp:
return {
prompt,
stream: true,
temperature: options.temperature,
n_predict: options.numPredictChat
n_predict: options.numPredictFim
}
case ApiProviders.LiteLLM:
default:
return {
messages: [{ content: prompt, role: USER }],
model: options.model,
stream: true,
max_tokens: options.numPredictChat,
max_tokens: options.numPredictFim,
temperature: options.temperature
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/extension/providers/completion.ts
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ export class CompletionProvider implements InlineCompletionItemProvider {
prompt,
{
model: this._fimModel,
numPredictChat: this._numPredictFim,
numPredictFim: this._numPredictFim,
temperature: this._temperature,
keepAlive: this._keepAlive
}
Expand Down

0 comments on commit 199ac65

Please sign in to comment.