
Commit

fix/model-prompt: with auth
gptlang committed Jul 16, 2024
1 parent 02d6e9a commit cca4358
Showing 1 changed file with 28 additions and 26 deletions.
lua/CopilotChat/copilot.lua: 28 additions & 26 deletions (54 changes)
@@ -505,33 +505,35 @@ end
 function Copilot:select_model(callback)
   local url = 'https://api.githubcopilot.com/models'
   local headers = generate_headers(self.token.token, self.sessionid, self.machineid)
-  curl.get(url, {
-    headers = headers,
-    proxy = self.proxy,
-    insecure = self.allow_insecure,
-    on_error = function(err)
-      err = 'Failed to get response: ' .. vim.inspect(err)
-      log.error(err)
-    end,
-    callback = function(response)
-      if response.status ~= 200 then
-        local msg = 'Failed to fetch models: ' .. tostring(response.status)
-        log.error(msg)
-        return
-      end
+  self:with_auth(function()
+    curl.get(url, {
+      headers = headers,
+      proxy = self.proxy,
+      insecure = self.allow_insecure,
+      on_error = function(err)
+        err = 'Failed to get response: ' .. vim.inspect(err)
+        log.error(err)
+      end,
+      callback = function(response)
+        if response.status ~= 200 then
+          local msg = 'Failed to fetch models: ' .. tostring(response.status)
+          log.error(msg)
+          return
+        end
 
-      local models = vim.json.decode(response.body)['data']
-      local selections = {}
-      for _, model in ipairs(models) do
-        table.insert(selections, model['version'])
-      end
-      vim.ui.select(selections, {
-        prompt = 'Select a model',
-      }, function(choice)
-        callback(choice)
-      end)
-    end,
-  })
+        local models = vim.json.decode(response.body)['data']
+        local selections = {}
+        for _, model in ipairs(models) do
+          table.insert(selections, model['version'])
+        end
+        vim.ui.select(selections, {
+          prompt = 'Select a model',
+        }, function(choice)
+          callback(choice)
+        end)
+      end,
+    })
+  end)
 end
 
 --- Generate embeddings for the given inputs
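Note on the change: the only functional difference is that the GET to https://api.githubcopilot.com/models is now issued inside self:with_auth(...), which, judging from the name and the commit message, ensures a valid Copilot session/token is in place before the request fires rather than failing when authentication has not yet completed. Below is a minimal usage sketch of the (unchanged) select_model callback contract; the copilot instance is assumed to already exist, and the choice argument may be nil when the user cancels the vim.ui.select prompt.

-- Hypothetical usage sketch (not part of this commit): given an already
-- constructed Copilot instance `copilot`, prompt for a model asynchronously.
copilot:select_model(function(choice)
  -- `choice` is one of the listed model versions, or nil if the prompt was cancelled.
  if choice then
    vim.notify('CopilotChat: selected model ' .. choice)
  end
end)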
