Skip to content

Commit e73cb24

Browse files
fix(providers): no top_p, n, and temperature for -codex GPT models
* providers - omit top_p, n, and temperature for -codex GPT models * Update lua/CopilotChat/config/providers.lua * Update lua/CopilotChat/config/providers.lua --------- Co-authored-by: Tomas Slusny <slusnucky@gmail.com>
1 parent be32610 commit e73cb24

1 file changed

Lines changed: 2 additions & 1 deletion

File tree

lua/CopilotChat/config/providers.lua

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -278,6 +278,7 @@ end
278278
---@return table
279279
local function prepare_chat_input(inputs, opts)
280280
local is_o1 = vim.startswith(opts.model.id, 'o1')
281+
local is_codex = opts.model.id:find('codex') ~= nil
281282

282283
inputs = vim.tbl_map(function(input)
283284
local output = {
@@ -324,7 +325,7 @@ local function prepare_chat_input(inputs, opts)
324325
end, opts.tools)
325326
end
326327

327-
if not is_o1 then
328+
if not is_o1 and not is_codex then
328329
out.n = 1
329330
out.top_p = 1
330331
out.temperature = opts.temperature

0 commit comments

Comments
 (0)