Note
The plugin was rewritten in Lua (previously Python). Please check the migration guide from version 1 to version 2 for more information.
Ensure you have the following installed:
- Neovim stable (0.9.5) or nightly.
Optional:
- tiktoken_core:
sudo luarocks install --lua-version 5.1 tiktoken_core
Alternatively, download a pre-built binary from the lua-tiktoken releases. You can check your Lua PATH in Neovim with :lua print(package.cpath) and save the binary as tiktoken_core.so in any of the listed paths.
For Arch Linux users, you can install luajit-tiktoken-bin or lua51-tiktoken-bin from the AUR!
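To confirm that Neovim can actually load the module, a quick check (a minimal sketch; pcall simply reports whether the require succeeds) is:
:lua print(pcall(require, 'tiktoken_core'))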
return {
{
"CopilotC-Nvim/CopilotChat.nvim",
branch = "canary",
dependencies = {
{ "zbirenbaum/copilot.lua" }, -- or github/copilot.vim
{ "nvim-lua/plenary.nvim" }, -- for curl, log wrapper
},
build = "make tiktoken", -- Only on MacOS or Linux
opts = {
debug = true, -- Enable debugging
-- See Configuration section for rest
},
-- See Commands section for default commands if you want to lazy load on them
},
}
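If you want to lazy load on the plugin's commands, a sketch of the same spec (the cmd list below is just an example subset; see the Commands section for the full set) could look like:
return {
  {
    "CopilotC-Nvim/CopilotChat.nvim",
    branch = "canary",
    dependencies = {
      { "zbirenbaum/copilot.lua" },
      { "nvim-lua/plenary.nvim" },
    },
    build = "make tiktoken", -- Only on MacOS or Linux
    -- Load only when one of these commands is first run
    cmd = { "CopilotChat", "CopilotChatToggle", "CopilotChatExplain" },
    opts = {},
  },
}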
See @jellydn's configuration for reference.
Similar to the lazy.nvim setup, you can use the following vim-plug configuration:
call plug#begin()
Plug 'zbirenbaum/copilot.lua'
Plug 'nvim-lua/plenary.nvim'
Plug 'CopilotC-Nvim/CopilotChat.nvim', { 'branch': 'canary' }
call plug#end()
lua << EOF
require("CopilotChat").setup {
debug = true, -- Enable debugging
-- See Configuration section for rest
}
EOF
- Put the files in the right place:
mkdir -p ~/.config/nvim/pack/copilotchat/start
cd ~/.config/nvim/pack/copilotchat/start
git clone https://github.com/zbirenbaum/copilot.lua
git clone https://github.com/nvim-lua/plenary.nvim
git clone -b canary https://github.com/CopilotC-Nvim/CopilotChat.nvim
- Add to your configuration (e.g. ~/.config/nvim/init.lua):
require("CopilotChat").setup {
debug = true, -- Enable debugging
-- See Configuration section for rest
}
See @deathbeam's configuration for reference.
- :CopilotChat <input>? - Open chat window with optional input
- :CopilotChatOpen - Open chat window
- :CopilotChatClose - Close chat window
- :CopilotChatToggle - Toggle chat window
- :CopilotChatStop - Stop current copilot output
- :CopilotChatReset - Reset chat window
- :CopilotChatSave <name>? - Save chat history to file
- :CopilotChatLoad <name>? - Load chat history from file
- :CopilotChatDebugInfo - Show debug information
- :CopilotChatModels - View and select available models. This is reset when a new instance is made. Please set your model in init.lua for persistence.
- :CopilotChatModel - View the currently selected model
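For example, to persist a conversation and restore it later (the name review-session below is arbitrary):
:CopilotChatSave review-session
:CopilotChatLoad review-session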
- :CopilotChatExplain - Write an explanation for the active selection as paragraphs of text
- :CopilotChatReview - Review the selected code
- :CopilotChatFix - There is a problem in this code. Rewrite the code to show it with the bug fixed
- :CopilotChatOptimize - Optimize the selected code to improve performance and readability
- :CopilotChatDocs - Please add documentation comment for the selection
- :CopilotChatTests - Please generate tests for my code
- :CopilotChatFixDiagnostic - Please assist with the following diagnostic issue in file
- :CopilotChatCommit - Write commit message for the change with commitizen convention
- :CopilotChatCommitStaged - Write commit message for the staged change with commitizen convention
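These prompt commands can also be bound to keys. A minimal sketch (the <leader>ce mapping is just an example choice, not a plugin default):
-- Run the Explain prompt on the default selection (visual or current line)
vim.keymap.set('n', '<leader>ce', '<cmd>CopilotChatExplain<cr>', { desc = 'CopilotChat - Explain code' })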
local chat = require("CopilotChat")
-- Open chat window
chat.open()
-- Open chat window with custom options
chat.open({
window = {
layout = 'float',
title = 'My Title',
},
})
-- Close chat window
chat.close()
-- Toggle chat window
chat.toggle()
-- Toggle chat window with custom options
chat.toggle({
window = {
layout = 'float',
title = 'My Title',
},
})
-- Reset chat window
chat.reset()
-- Ask a question
chat.ask("Explain how it works.")
-- Ask a question with custom options
chat.ask("Explain how it works.", {
selection = require("CopilotChat.select").buffer,
})
-- Ask a question and do something with the response
chat.ask("Show me something interesting", {
callback = function(response)
print("Response:", response)
end,
})
-- Get all available prompts (can be used for integrations like fzf/telescope)
local prompts = chat.prompts()
-- Get last copilot response (also can be used for integrations and custom keymaps)
local response = chat.response()
-- Pick a prompt using vim.ui.select
local actions = require("CopilotChat.actions")
-- Pick help actions
actions.pick(actions.help_actions())
-- Pick prompt actions
actions.pick(actions.prompt_actions({
selection = require("CopilotChat.select").visual,
}))
Also see here:
{
debug = false, -- Enable debug logging
proxy = nil, -- [protocol://]host[:port] Use this proxy
allow_insecure = false, -- Allow insecure server connections
system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use
model = 'gpt-4o', -- GPT model to use, 'gpt-3.5-turbo', 'gpt-4', or 'gpt-4o'
temperature = 0.1, -- GPT temperature
question_header = '## User ', -- Header to use for user questions
answer_header = '## Copilot ', -- Header to use for AI answers
error_header = '## Error ', -- Header to use for errors
separator = '───', -- Separator to use in chat
show_folds = true, -- Shows folds for sections in chat
show_help = true, -- Shows help message as virtual lines when waiting for user input
auto_follow_cursor = true, -- Auto-follow cursor in chat
auto_insert_mode = false, -- Automatically enter insert mode when opening window and on new prompt
insert_at_end = false, -- Move cursor to end of buffer when inserting text
clear_chat_on_new_prompt = false, -- Clears chat on every new prompt
highlight_selection = true, -- Highlight selection in the source buffer when in the chat window
context = nil, -- Default context to use, 'buffers', 'buffer' or none (can be specified manually in prompt via @).
history_path = vim.fn.stdpath('data') .. '/copilotchat_history', -- Default path to stored history
callback = nil, -- Callback to use when ask response is received
-- default selection (visual or line)
selection = function(source)
return select.visual(source) or select.line(source)
end,
-- default prompts
prompts = {
Explain = {
prompt = '/COPILOT_EXPLAIN Write an explanation for the active selection as paragraphs of text.',
},
Review = {
prompt = '/COPILOT_REVIEW Review the selected code.',
callback = function(response, source)
-- see config.lua for implementation
end,
},
Fix = {
prompt = '/COPILOT_GENERATE There is a problem in this code. Rewrite the code to show it with the bug fixed.',
},
Optimize = {
prompt = '/COPILOT_GENERATE Optimize the selected code to improve performance and readability.',
},
Docs = {
prompt = '/COPILOT_GENERATE Please add documentation comment for the selection.',
},
Tests = {
prompt = '/COPILOT_GENERATE Please generate tests for my code.',
},
FixDiagnostic = {
prompt = 'Please assist with the following diagnostic issue in file:',
selection = select.diagnostics,
},
Commit = {
prompt = 'Write commit message for the change with commitizen convention. Make sure the title has maximum 50 characters and message is wrapped at 72 characters. Wrap the whole message in code block with language gitcommit.',
selection = select.gitdiff,
},
CommitStaged = {
prompt = 'Write commit message for the change with commitizen convention. Make sure the title has maximum 50 characters and message is wrapped at 72 characters. Wrap the whole message in code block with language gitcommit.',
selection = function(source)
return select.gitdiff(source, true)
end,
},
},
-- default window options
window = {
layout = 'vertical', -- 'vertical', 'horizontal', 'float', 'replace'
width = 0.5, -- fractional width of parent, or absolute width in columns when > 1
height = 0.5, -- fractional height of parent, or absolute height in rows when > 1
-- Options below only apply to floating windows
relative = 'editor', -- 'editor', 'win', 'cursor', 'mouse'
border = 'single', -- 'none', 'single', 'double', 'rounded', 'solid', 'shadow'
row = nil, -- row position of the window, default is centered
col = nil, -- column position of the window, default is centered
title = 'Copilot Chat', -- title of chat window
footer = nil, -- footer of chat window
zindex = 1, -- determines if window is on top or below other floating windows
},
-- default mappings
mappings = {
complete = {
detail = 'Use @<Tab> or /<Tab> for options.',
insert = '<Tab>',
},
close = {
normal = 'q',
insert = '<C-c>'
},
reset = {
normal = '<C-l>',
insert = '<C-l>'
},
submit_prompt = {
normal = '<CR>',
insert = '<C-s>'
},
accept_diff = {
normal = '<C-y>',
insert = '<C-y>'
},
yank_diff = {
normal = 'gy',
register = '"',
},
show_diff = {
normal = 'gd'
},
show_system_prompt = {
normal = 'gp'
},
show_user_selection = {
normal = 'gs'
},
},
}
For further reference, you can view @jellydn's configuration.
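As an example, a setup call that overrides only a few of these defaults (everything not listed keeps the values shown above) might look like:
require("CopilotChat").setup({
  auto_insert_mode = true, -- enter insert mode when the window opens
  window = {
    layout = 'float',
    border = 'rounded',
  },
})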
This will define a prompt that you can reference with /MyCustomPrompt in chat, call with :CopilotChatMyCustomPrompt, or trigger with the keymap <leader>ccmc. It will use visual selection as the default selection. If you are using lazy.nvim and are already lazy loading based on commands, make sure to include the prompt commands and keymaps in cmd and keys respectively (see the sketch after the configuration below).
{
prompts = {
MyCustomPrompt = {
prompt = 'Explain how it works.',
mapping = '<leader>ccmc',
description = 'My custom prompt description',
selection = require('CopilotChat.select').visual,
},
},
}
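For instance, with lazy.nvim the matching lazy-load triggers for this prompt could be added to the plugin spec like this (a sketch; only cmd and keys are shown in addition to the fields from the installation section):
return {
  {
    "CopilotC-Nvim/CopilotChat.nvim",
    branch = "canary",
    -- ...dependencies, build and opts as in the installation section...
    cmd = { "CopilotChatMyCustomPrompt" },
    keys = { "<leader>ccmc" },
  },
}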
You can reference system or user prompts in your configuration or in chat with the /PROMPT_NAME slash notation. For a collection of the default COPILOT_ (system) and USER_ (user) prompts, see here.
{
prompts = {
MyCustomPrompt = {
prompt = '/COPILOT_EXPLAIN Explain how it works.',
},
MyCustomPrompt2 = {
prompt = '/MyCustomPrompt Include some additional context.',
},
},
}
You can define custom system prompts by using the system_prompt property when passing config around.
{
system_prompt = 'Your name is Github Copilot and you are an AI assistant for developers.',
prompts = {
Johnny = {
system_prompt = 'Your name is Johny Microsoft and you are not an AI assistant for developers.',
prompt = 'Explain how it works.',
},
Yarrr = {
system_prompt = 'You are fascinated by pirates, so please respond in pirate speak.'
},
},
}
To use any of your custom prompts, simply run :CopilotChat<prompt name>, e.g. :CopilotChatJohnny or :CopilotChatYarrr What is a sorting algo? Tab autocomplete will help you out.
You can set local options for the buffers that are created by this plugin: copilot-diff, copilot-system-prompt, copilot-user-selection, copilot-chat.
vim.api.nvim_create_autocmd('BufEnter', {
pattern = 'copilot-*',
callback = function()
vim.opt_local.relativenumber = true
-- C-p to print last response
vim.keymap.set('n', '<C-p>', function()
print(require("CopilotChat").response())
end, { buffer = true, remap = true })
end
})
Quick chat with your buffer
To chat with Copilot using the entire content of the buffer, you can add the following configuration to your keymap:
-- lazy.nvim keys
-- Quick chat with Copilot
{
"<leader>ccq",
function()
local input = vim.fn.input("Quick Chat: ")
if input ~= "" then
require("CopilotChat").ask(input, { selection = require("CopilotChat.select").buffer })
end
end,
desc = "CopilotChat - Quick chat",
}
Inline chat
Change the window layout to float
and position relative to cursor to make the window look like inline chat.
This will allow you to chat with Copilot without opening a new window.
-- lazy.nvim opts
{
window = {
layout = 'float',
relative = 'cursor',
width = 1,
height = 0.4,
row = 1
}
}
Telescope integration
Requires telescope.nvim plugin to be installed.
-- lazy.nvim keys
-- Show help actions with telescope
{
"<leader>cch",
function()
local actions = require("CopilotChat.actions")
require("CopilotChat.integrations.telescope").pick(actions.help_actions())
end,
desc = "CopilotChat - Help actions",
},
-- Show prompts actions with telescope
{
"<leader>ccp",
function()
local actions = require("CopilotChat.actions")
require("CopilotChat.integrations.telescope").pick(actions.prompt_actions())
end,
desc = "CopilotChat - Prompt actions",
},
fzf-lua integration
Requires fzf-lua plugin to be installed.
-- lazy.nvim keys
-- Show help actions with fzf-lua
{
"<leader>cch",
function()
local actions = require("CopilotChat.actions")
require("CopilotChat.integrations.fzflua").pick(actions.help_actions())
end,
desc = "CopilotChat - Help actions",
},
-- Show prompts actions with fzf-lua
{
"<leader>ccp",
function()
local actions = require("CopilotChat.actions")
require("CopilotChat.integrations.fzflua").pick(actions.prompt_actions())
end,
desc = "CopilotChat - Prompt actions",
},
nvim-cmp integration
Requires nvim-cmp plugin to be installed (and properly configured).
-- Registers copilot-chat source and enables it for copilot-chat filetype (so copilot chat window)
require("CopilotChat.integrations.cmp").setup()
-- You might also want to disable default <tab> complete mapping for copilot chat when doing this
require('CopilotChat').setup({
mappings = {
complete = {
insert = '',
},
},
-- rest of your config
})
- Use indexed vector database with current workspace for better context selection
- General QOL improvements
For development, you can use the provided Makefile command to install the pre-commit tool:
make install-pre-commit
This will install the pre-commit tool and the pre-commit hooks.
If you want to contribute to this project, please read the CONTRIBUTING.md file.
Thanks goes to these wonderful people (emoji key):
This project follows the all-contributors specification. Contributions of any kind are welcome!