Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use gpt-4o by default #377

Merged
merged 2 commits on Jul 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ Also see [here](/lua/CopilotChat/config.lua):
allow_insecure = false, -- Allow insecure server connections

system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use
model = 'gpt-4', -- GPT model to use, 'gpt-3.5-turbo' or 'gpt-4'
model = 'gpt-4o', -- GPT model to use, 'gpt-3.5-turbo', 'gpt-4', or 'gpt-4o'
temperature = 0.1, -- GPT temperature

question_header = '## User ', -- Header to use for user questions
Expand Down
2 changes: 1 addition & 1 deletion lua/CopilotChat/config.lua
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ return {
allow_insecure = false, -- Allow insecure server connections

system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use
model = 'gpt-4', -- GPT model to use, 'gpt-3.5-turbo' or 'gpt-4'
model = 'gpt-4o-2024-05-13', -- GPT model to use, 'gpt-3.5-turbo', 'gpt-4', or 'gpt-4o-2024-05-13'
temperature = 0.1, -- GPT temperature

question_header = '## User ', -- Header to use for user questions
Expand Down
2 changes: 1 addition & 1 deletion lua/CopilotChat/copilot.lua
Original file line number Diff line number Diff line change
Expand Up @@ -353,7 +353,7 @@ function Copilot:ask(prompt, opts)
local start_row = opts.start_row or 0
local end_row = opts.end_row or 0
local system_prompt = opts.system_prompt or prompts.COPILOT_INSTRUCTIONS
local model = opts.model or 'gpt-4'
local model = opts.model or 'gpt-4o-2024-05-13'
local temperature = opts.temperature or 0.1
local on_done = opts.on_done
local on_progress = opts.on_progress
Expand Down
3 changes: 3 additions & 0 deletions lua/CopilotChat/init.lua
Original file line number Diff line number Diff line change
Expand Up @@ -606,6 +606,9 @@ function M.setup(config)
end

M.config = vim.tbl_deep_extend('force', default_config, config or {})
if M.config.model == 'gpt-4o' then
M.config.model = 'gpt-4o-2024-05-13'
end

if state.copilot then
state.copilot:stop()
Expand Down
2 changes: 1 addition & 1 deletion lua/CopilotChat/tiktoken.lua
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ end
local function load_tiktoken_data(done, model)
local tiktoken_url = 'https://openaipublic.blob.core.windows.net/encodings/cl100k_base.tiktoken'
-- If model starts with gpt-4o, use o200k_base.tiktoken
if model == 'gpt-4o' then
if vim.startswith(model, 'gpt-4o') then
tiktoken_url = 'https://openaipublic.blob.core.windows.net/encodings/o200k_base.tiktoken'
end
local async
Expand Down
Loading