From 734d38c3f7605754e369d3d2b5a06b2b77ad9a03 Mon Sep 17 00:00:00 2001 From: Tomas Slusny Date: Wed, 20 Nov 2024 11:30:58 +0100 Subject: [PATCH] feat(chat): add no_chat option Add new configuration option for suppressing output and chat history storage. This enables using CopilotChat purely as a backend service by handling responses through callbacks without any UI interaction or state persistence. See #551 Signed-off-by: Tomas Slusny --- README.md | 1 + lua/CopilotChat/config.lua | 2 + lua/CopilotChat/copilot.lua | 20 ++++++--- lua/CopilotChat/init.lua | 81 +++++++++++++++++++++++-------------- 4 files changed, 68 insertions(+), 36 deletions(-) diff --git a/README.md b/README.md index ab32435d..ce109fdb 100644 --- a/README.md +++ b/README.md @@ -449,6 +449,7 @@ Also see [here](/lua/CopilotChat/config.lua): history_path = vim.fn.stdpath('data') .. '/copilotchat_history', -- Default path to stored history callback = nil, -- Callback to use when ask response is received + no_chat = false, -- Do not write to chat buffer and use chat history (useful for using callback for custom processing) -- default selection selection = function(source) diff --git a/lua/CopilotChat/config.lua b/lua/CopilotChat/config.lua index 3f5556a8..581258fb 100644 --- a/lua/CopilotChat/config.lua +++ b/lua/CopilotChat/config.lua @@ -93,6 +93,7 @@ local utils = require('CopilotChat.utils') ---@field highlight_headers boolean? ---@field history_path string? ---@field callback fun(response: string, source: CopilotChat.config.source)? +---@field no_chat boolean? ---@field selection nil|fun(source: CopilotChat.config.source):CopilotChat.config.selection? ---@field contexts table? ---@field prompts table? @@ -127,6 +128,7 @@ return { history_path = vim.fn.stdpath('data') .. 
'/copilotchat_history', -- Default path to stored history callback = nil, -- Callback to use when ask response is received + no_chat = false, -- Do not write to chat buffer and use chat history (useful for using callback for custom processing) -- default selection selection = function(source) diff --git a/lua/CopilotChat/copilot.lua b/lua/CopilotChat/copilot.lua index 481838b6..db4ba2cb 100644 --- a/lua/CopilotChat/copilot.lua +++ b/lua/CopilotChat/copilot.lua @@ -10,6 +10,7 @@ ---@field model string? ---@field agent string? ---@field temperature number? +---@field no_history boolean? ---@field on_progress nil|fun(response: string):nil ---@class CopilotChat.copilot.embed.opts @@ -566,6 +567,7 @@ function Copilot:ask(prompt, opts) local model = opts.model or 'gpt-4o-2024-05-13' local agent = opts.agent or 'copilot' local temperature = opts.temperature or 0.1 + local no_history = opts.no_history or false local on_progress = opts.on_progress local job_id = uuid() self.current_job = job_id @@ -578,6 +580,7 @@ function Copilot:ask(prompt, opts) log.debug('Agent: ' .. agent) log.debug('Temperature: ' .. 
temperature) + local history = no_history and {} or self.history local models = self:fetch_models() local agents = self:fetch_agents() local agent_config = agents[agent] @@ -618,11 +621,11 @@ function Copilot:ask(prompt, opts) -- Calculate how many tokens we can use for history local history_limit = max_tokens - required_tokens - reserved_tokens - local history_tokens = count_history_tokens(self.history) + local history_tokens = count_history_tokens(history) -- If we're over history limit, truncate history from the beginning - while history_tokens > history_limit and #self.history > 0 do - local removed = table.remove(self.history, 1) + while history_tokens > history_limit and #history > 0 do + local removed = table.remove(history, 1) history_tokens = history_tokens - tiktoken.count(removed.content) end @@ -740,7 +743,7 @@ function Copilot:ask(prompt, opts) local is_stream = not vim.startswith(model, 'o1') local body = vim.json.encode( generate_ask_request( - self.history, + history, prompt, system_prompt, generated_messages, @@ -836,16 +839,21 @@ function Copilot:ask(prompt, opts) log.trace('Full response: ' .. full_response) log.debug('Last message: ' .. vim.inspect(last_message)) - table.insert(self.history, { + table.insert(history, { content = prompt, role = 'user', }) - table.insert(self.history, { + table.insert(history, { content = full_response, role = 'assistant', }) + if not no_history then + log.debug('History size increased to ' .. #history) + self.history = history + end + return full_response, last_message and last_message.usage and last_message.usage.total_tokens, max_tokens diff --git a/lua/CopilotChat/init.lua b/lua/CopilotChat/init.lua index 8f4def10..f0b6b750 100644 --- a/lua/CopilotChat/init.lua +++ b/lua/CopilotChat/init.lua @@ -43,14 +43,15 @@ local state = { help = nil, } +---@param config CopilotChat.config ---@return CopilotChat.config.selection? 
-local function get_selection() +local function get_selection(config) local bufnr = state.source and state.source.bufnr local winnr = state.source and state.source.winnr if - state.config - and state.config.selection + config + and config.selection and utils.buf_valid(bufnr) and winnr and vim.api.nvim_win_is_valid(winnr) @@ -105,7 +106,7 @@ local function highlight_selection(clear) return end - local selection = get_selection() + local selection = get_selection(state.config) if not selection or not selection.start_line or not utils.buf_valid(selection.bufnr) then return end @@ -171,7 +172,7 @@ local function get_diff() local header_filename, header_start_line, header_end_line = match_header(header) -- Initialize variables with selection if available - local selection = get_selection() + local selection = get_selection(state.config) local reference = selection and selection.content local start_line = selection and selection.start_line local end_line = selection and selection.end_line @@ -244,6 +245,10 @@ local function apply_diff(diff) end local function finish(config, message, hide_help, start_of_chat) + if config.no_chat then + return + end + if not start_of_chat then state.chat:append('\n\n') end @@ -274,9 +279,13 @@ local function finish(config, message, hide_help, start_of_chat) end end -local function show_error(err, config, append_newline) +local function show_error(config, err, append_newline) log.error(vim.inspect(err)) + if config.no_chat then + return + end + if type(err) == 'string' then local message = err:match('^[^:]+:[^:]+:(.+)') or err message = message:gsub('^%s*', '') @@ -591,33 +600,39 @@ end function M.ask(prompt, config) config = vim.tbl_deep_extend('force', M.config, config or {}) vim.diagnostic.reset(vim.api.nvim_create_namespace('copilot_diagnostics')) - M.open(config) + + if not config.no_chat then + M.open(config) + end prompt = vim.trim(prompt or '') if prompt == '' then return end - if config.clear_chat_on_new_prompt then - M.stop(true, 
config) - elseif state.copilot:stop() then - finish(config, nil, true) - end + if not config.no_chat then + if config.clear_chat_on_new_prompt then + M.stop(true, config) + elseif state.copilot:stop() then + finish(config, nil, true) + end - -- Clear the current input prompt before asking a new question - local chat_lines = vim.api.nvim_buf_get_lines(state.chat.bufnr, 0, -1, false) - local _, start_line, end_line = - utils.find_lines(chat_lines, #chat_lines, M.config.separator .. '$', nil, true) - if #chat_lines == end_line then - vim.api.nvim_buf_set_lines(state.chat.bufnr, start_line, end_line, false, { '' }) - end + state.last_prompt = prompt - state.chat:append(prompt) - state.chat:append('\n\n' .. config.answer_header .. config.separator .. '\n\n') + -- Clear the current input prompt before asking a new question + local chat_lines = vim.api.nvim_buf_get_lines(state.chat.bufnr, 0, -1, false) + local _, start_line, end_line = + utils.find_lines(chat_lines, #chat_lines, M.config.separator .. '$', nil, true) + if #chat_lines == end_line then + vim.api.nvim_buf_set_lines(state.chat.bufnr, start_line, end_line, false, { '' }) + end + + state.chat:append(prompt) + state.chat:append('\n\n' .. config.answer_header .. config.separator .. 
'\n\n') + end -- Resolve prompt references - local system_prompt, updated_prompt = update_prompts(prompt or '', config.system_prompt) - state.last_prompt = prompt + local system_prompt, updated_prompt = update_prompts(prompt, config.system_prompt) -- Remove sticky prefix prompt = table.concat( @@ -665,7 +680,7 @@ function M.ask(prompt, config) local embeddings = vim.tbl_values(embedding_map) -- Retrieve the selection - local selection = get_selection() + local selection = get_selection(config) async.run(function() local agents = vim.tbl_keys(state.copilot:list_agents()) @@ -692,7 +707,7 @@ function M.ask(prompt, config) if not query_ok then vim.schedule(function() - show_error(filtered_embeddings, config, has_output) + show_error(config, filtered_embeddings, has_output) end) return end @@ -705,9 +720,13 @@ function M.ask(prompt, config) model = selected_model, agent = selected_agent, temperature = config.temperature, + no_history = config.no_chat, on_progress = function(token) vim.schedule(function() - state.chat:append(token) + if not config.no_chat then + state.chat:append(token) + end + has_output = true end) end, @@ -715,7 +734,7 @@ function M.ask(prompt, config) if not ask_ok then vim.schedule(function() - show_error(response, config, has_output) + show_error(config, response, has_output) end) return end @@ -724,7 +743,9 @@ function M.ask(prompt, config) return end - state.last_response = response + if not config.no_chat then + state.last_response = response + end vim.schedule(function() if token_count and token_max_count and token_count > 0 then @@ -1086,7 +1107,7 @@ function M.setup(config) map_key(M.config.mappings.quickfix_diffs, bufnr, function() local chat_lines = vim.api.nvim_buf_get_lines(bufnr, 0, -1, false) - local selection = get_selection() + local selection = get_selection(state.config) local items = {} local in_block = false local block_start = 0 @@ -1187,7 +1208,7 @@ function M.setup(config) end) map_key(M.config.mappings.show_user_selection, 
bufnr, function() - local selection = get_selection() + local selection = get_selection(state.config) if not selection or not selection.content then return end