feat: add show context command for Copilot Chat
Add a new command that shows the context of the current chat message, mapped
to `gc` by default. Unify the existing overlay windows into a single overlay
implementation, and improve resolution of prompts and embeddings by extracting
them into separate functions. The overlay now also shows a truncated preview
of longer files.

This change improves the debugging experience when working with contextual
prompts by letting users inspect which files and contexts are being used for
the current prompt.
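As a quick sketch of how the new command is wired up from a user's point of view (this mirrors the default added in `config.lua` below, so the block is only needed to rebind the key):

```lua
-- Minimal configuration sketch: the new "show context" command.
-- 'gc' is the shipped default, so this override is purely illustrative.
require('CopilotChat').setup({
  mappings = {
    show_user_context = {
      normal = 'gc',
    },
  },
})
```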
deathbeam committed Nov 21, 2024
1 parent a3d2429 commit d99aef3
Showing 4 changed files with 109 additions and 78 deletions.
README.md: 7 additions & 0 deletions
@@ -147,6 +147,7 @@ See @deathbeam for [configuration](https://github.com/deathbeam/dotfiles/blob/ma
- `gd` - Show diff between source and nearest diff
- `gp` - Show system prompt for current chat
- `gs` - Show current user selection
- `gc` - Show current user context
- `gh` - Show help message

The mappings can be customized by setting the `mappings` table in your configuration. Each mapping can have:
@@ -558,6 +559,12 @@ Also see [here](/lua/CopilotChat/config.lua):
show_user_selection = {
normal = 'gs'
},
show_user_context = {
normal = 'gc',
},
show_help = {
normal = 'gh',
},
},
}
```
lua/CopilotChat/config.lua: 4 additions & 0 deletions
@@ -66,6 +66,7 @@ local utils = require('CopilotChat.utils')
---@field show_diff CopilotChat.config.mapping?
---@field show_system_prompt CopilotChat.config.mapping?
---@field show_user_selection CopilotChat.config.mapping?
---@field show_user_context CopilotChat.config.mapping?
---@field show_help CopilotChat.config.mapping?

--- CopilotChat default configuration
@@ -390,6 +391,9 @@ return {
show_user_selection = {
normal = 'gs',
},
show_user_context = {
normal = 'gc',
},
show_help = {
normal = 'gh',
},
lua/CopilotChat/init.lua: 94 additions & 78 deletions
@@ -21,8 +21,7 @@ local plugin_name = 'CopilotChat.nvim'
--- @field last_response string?
--- @field chat CopilotChat.Chat?
--- @field diff CopilotChat.Diff?
--- @field system_prompt CopilotChat.Overlay?
--- @field user_selection CopilotChat.Overlay?
--- @field overlay CopilotChat.Overlay?
--- @field help CopilotChat.Overlay?
local state = {
copilot = nil,
@@ -38,9 +37,7 @@ local state = {
-- Overlays
chat = nil,
diff = nil,
system_prompt = nil,
user_selection = nil,
help = nil,
overlay = nil,
}

---@param config CopilotChat.config
@@ -196,7 +193,7 @@ end
---@param prompt string
---@param system_prompt string
---@return string, string
local function update_prompts(prompt, system_prompt)
local function resolve_prompts(prompt, system_prompt)
local prompts_to_use = M.prompts()
local try_again = false
local result = string.gsub(prompt, [[/[%w_]+]], function(match)
@@ -219,12 +216,61 @@
end)

if try_again then
return update_prompts(result, system_prompt)
return resolve_prompts(result, system_prompt)
end

return system_prompt, result
end
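The recursion in `resolve_prompts` bottoms out once an expansion introduces no further `/prompt` references. A standalone sketch of the same technique, with invented prompt names and a plain Lua table standing in for the plugin's `M.prompts()`:

```lua
-- Standalone sketch (not plugin code): recursive /prompt expansion.
local prompts = {
  Explain = 'Explain how the following code works.',
  Short = '/Explain Keep the answer brief.',
}

local function resolve(prompt)
  local try_again = false
  local result = prompt:gsub('/([%w_]+)', function(name)
    local found = prompts[name]
    if not found then
      return '/' .. name -- unknown reference, leave it untouched
    end
    if found:find('/[%w_]+') then
      try_again = true -- the expansion itself contains references
    end
    return found
  end)
  if try_again then
    return resolve(result)
  end
  return result
end

print(resolve('/Short'))
-- Explain how the following code works. Keep the answer brief.
```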

---@param prompt string
---@param config CopilotChat.config
---@return table<CopilotChat.copilot.embed>, string
local function resolve_embeddings(prompt, config)
local embedding_map = {}
local function parse_context(prompt_context)
local split = vim.split(prompt_context, ':')
local context_name = table.remove(split, 1)
local context_input = vim.trim(table.concat(split, ':'))
local context_value = config.contexts[context_name]
if context_input == '' then
context_input = nil
end

if context_value then
for _, embedding in ipairs(context_value.resolve(context_input, state.source)) do
if embedding then
embedding_map[embedding.filename] = embedding
end
end

prompt = prompt:gsub('#' .. prompt_context .. '%s*', '')
end
end

-- Sort and parse contexts
local contexts = {}
if config.context then
if type(config.context) == 'table' then
for _, config_context in ipairs(config.context) do
table.insert(contexts, config_context)
end
else
table.insert(contexts, config.context)
end
end
for prompt_context in prompt:gmatch('#([^%s]+)') do
table.insert(contexts, prompt_context)
end
table.sort(contexts, function(a, b)
return #a > #b
end)
for _, prompt_context in ipairs(contexts) do
parse_context(prompt_context)
end

return vim.tbl_values(embedding_map), prompt
end
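The per-token split above is easiest to see on a concrete value. A sketch for one `#name:input` token (runs in Neovim's `:lua`; the `file` context name is an assumption for illustration, real names come from `config.contexts`):

```lua
-- Token as it appears after the '#' in a prompt such as
-- 'Explain #file:lua/CopilotChat/init.lua please' (hypothetical input).
local token = 'file:lua/CopilotChat/init.lua'
local split = vim.split(token, ':')
local context_name = table.remove(split, 1)              -- 'file'
local context_input = vim.trim(table.concat(split, ':')) -- 'lua/CopilotChat/init.lua'
print(context_name, context_input)
```

Rejoining the remainder with `:` keeps inputs that themselves contain colons intact, and sorting tokens longest-first before the `gsub` strip presumably ensures that `#file:foo` is consumed before a bare `#file` can eat its prefix.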

---@param config CopilotChat.config
---@param message string?
---@param hide_help boolean?
@@ -639,58 +685,19 @@ function M.ask(prompt, config)
end

-- Resolve prompt references
local system_prompt, updated_prompt = update_prompts(prompt, config.system_prompt)
local system_prompt, resolved_prompt = resolve_prompts(prompt, config.system_prompt)

-- Remove sticky prefix
prompt = table.concat(
vim.tbl_map(function(l)
return l:gsub('>%s+', '')
end, vim.split(updated_prompt, '\n')),
end, vim.split(resolved_prompt, '\n')),
'\n'
)

local embedding_map = {}
local function parse_context(prompt_context)
local split = vim.split(prompt_context, ':')
local context_name = table.remove(split, 1)
local context_input = vim.trim(table.concat(split, ':'))
local context_value = config.contexts[context_name]
if context_input == '' then
context_input = nil
end

if context_value then
for _, embedding in ipairs(context_value.resolve(context_input, state.source)) do
if embedding then
embedding_map[embedding.filename] = embedding
end
end

prompt = prompt:gsub('#' .. prompt_context .. '%s*', '')
end
end

-- Sort and parse contexts
local contexts = {}
if config.context then
if type(config.context) == 'table' then
for _, config_context in ipairs(config.context) do
table.insert(contexts, config_context)
end
else
table.insert(contexts, config.context)
end
end
for prompt_context in prompt:gmatch('#([^%s]+)') do
table.insert(contexts, prompt_context)
end
table.sort(contexts, function(a, b)
return #a > #b
end)
for _, prompt_context in ipairs(contexts) do
parse_context(prompt_context)
end
local embeddings = vim.tbl_values(embedding_map)
-- Resolve embeddings
local embeddings, embedded_prompt = resolve_embeddings(prompt, config)
prompt = embedded_prompt

-- Retrieve the selection
local selection = get_selection(config)
@@ -954,30 +961,12 @@ function M.setup(config)
end)
end)

if state.system_prompt then
state.system_prompt:delete()
end
state.system_prompt = Overlay('copilot-system-prompt', overlay_help, function(bufnr)
map_key(M.config.mappings.close, bufnr, function()
state.system_prompt:restore(state.chat.winnr, state.chat.bufnr)
end)
end)

if state.user_selection then
state.user_selection:delete()
if state.overlay then
state.overlay:delete()
end
state.user_selection = Overlay('copilot-user-selection', overlay_help, function(bufnr)
state.overlay = Overlay('copilot-overlay', overlay_help, function(bufnr)
map_key(M.config.mappings.close, bufnr, function()
state.user_selection:restore(state.chat.winnr, state.chat.bufnr)
end)
end)

if state.help then
state.help:delete()
end
state.help = Overlay('copilot-help', overlay_help, function(bufnr)
map_key(M.config.mappings.close, bufnr, function()
state.help:restore(state.chat.winnr, state.chat.bufnr)
state.overlay:restore(state.chat.winnr, state.chat.bufnr)
end)
end)

@@ -1015,7 +1004,7 @@ function M.setup(config)
end
end
end
state.help:show(chat_help, 'markdown', state.chat.winnr)
state.overlay:show(chat_help, 'markdown', state.chat.winnr)
end)

map_key(M.config.mappings.reset, bufnr, M.reset)
@@ -1167,12 +1156,16 @@ function M.setup(config)
end)

map_key(M.config.mappings.show_system_prompt, bufnr, function()
local prompt = state.config.system_prompt
if not prompt then
local section = state.chat:get_closest_section()
local system_prompt = state.config.system_prompt
if section and not section.answer then
system_prompt = resolve_prompts(section.content, state.config.system_prompt)
end
if not system_prompt then
return
end

state.system_prompt:show(vim.trim(prompt) .. '\n', 'markdown', state.chat.winnr)
state.overlay:show(vim.trim(system_prompt) .. '\n', 'markdown', state.chat.winnr)
end)

map_key(M.config.mappings.show_user_selection, bufnr, function()
@@ -1181,7 +1174,30 @@
return
end

state.user_selection:show(selection.content .. '\n', selection.filetype, state.chat.winnr)
state.overlay:show(selection.content, selection.filetype, state.chat.winnr)
end)

map_key(M.config.mappings.show_user_context, bufnr, function()
local section = state.chat:get_closest_section()
local embeddings = {}
if section and not section.answer then
embeddings = resolve_embeddings(section.content, state.config)
end

local text = ''
for _, embedding in ipairs(embeddings) do
local lines = vim.split(embedding.content, '\n')
local preview = table.concat(vim.list_slice(lines, 1, math.min(10, #lines)), '\n')
local header = string.format('**`%s`** (%s lines)', embedding.filename, #lines)
if #lines > 10 then
header = header .. ' (truncated)'
end

text = text
.. string.format('%s\n```%s\n%s\n```\n\n', header, embedding.filetype, preview)
end

state.overlay:show(vim.trim(text) .. '\n', 'markdown', state.chat.winnr)
end)
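The header built for each entry is plain markdown. A tiny sketch of the formatting above, with invented values:

```lua
-- A 42-line file gets a 10-line preview plus a '(truncated)' marker.
local filename, total_lines = 'lua/example.lua', 42
local header = string.format('**`%s`** (%s lines)', filename, total_lines)
if total_lines > 10 then
  header = header .. ' (truncated)'
end
print(header) -- **`lua/example.lua`** (42 lines) (truncated)
```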

vim.api.nvim_create_autocmd({ 'BufEnter', 'BufLeave' }, {
lua/CopilotChat/overlay.lua: 4 additions & 0 deletions
@@ -42,6 +42,10 @@ function Overlay:validate()
end

function Overlay:show(text, filetype, winnr, syntax)
if not text or vim.trim(text) == '' then
return
end

self:validate()
text = text .. '\n'

