feat: change to token calculation again
@@ -8,7 +8,7 @@ ignore_files:
   - "*.log"
   - "vendor/"

-include_file_contents: false
+include_file_contents: true

 debug: false
 improved_debug: false
@@ -87,6 +87,7 @@ function M.load()
       "No config file found (tried .chatgpt_config.yaml, chatgpt_config.yaml). Using defaults.",
       vim.log.levels.WARN
     )
+    config.max_token = 2048
     return config
   end

@@ -120,7 +121,7 @@ function M.load()
         if type(result.initial_files) == "table" then
           config.initial_files = result.initial_files
         end
-        if type(result.include_file_contents) == "boolean" then -- LOAD NEW FLAG
+        if type(result.include_file_contents) == "boolean" then
           config.include_file_contents = result.include_file_contents
         end
         if type(result.preview_changes) == "boolean" then
@@ -153,10 +154,17 @@ function M.load()
             end
           end
         end
+
+        if type(result.max_token) == "number" then
+          config.max_token = result.max_token
+        else
+          config.max_token = 2048
+        end
       end
     end
   else
     config.initial_prompt = "You are a coding assistant who receives a project's context and user instructions..."
+    config.max_token = 2048
   end

  -- Merge default prompt blocks
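For reference, a minimal, self-contained Lua sketch of the fallback logic this hunk adds to M.load. The resolve_max_token helper is hypothetical, written only to illustrate the branch above: a numeric max_token from the parsed config wins, and anything else, including a missing or unparsable config file, falls back to 2048.

-- Hypothetical helper mirroring the config.max_token branch added in this hunk.
local function resolve_max_token(result)
  if type(result) == "table" and type(result.max_token) == "number" then
    return result.max_token
  end
  return 2048
end

print(resolve_max_token({ max_token = 4096 }))    --> 4096
print(resolve_max_token({ max_token = "4096" }))  --> 2048 (non-numeric value is ignored)
print(resolve_max_token(nil))                     --> 2048 (no config file / failed parse)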
@@ -173,15 +173,20 @@ local function build_prompt(user_input, dirs, conf)
   table.insert(final_sections, table.concat(env_lines, "\n"))

   local final_prompt = table.concat(final_sections, "\n\n")
   -- Replace placeholder "%PROJECT_NAME%" with the actual project name from configuration
   final_prompt = final_prompt:gsub("%%PROJECT_NAME%%", conf.project_name)
   return final_prompt
 end

+-- New token estimation function.
+local function estimate_token_count(text)
+  -- Use a simple heuristic: assume an average of 4 characters per token.
+  return math.floor(#text / 4)
+end
+
 local function handle_step_by_step_if_needed(prompt, conf)
-  local length = #prompt
-  local limit = conf.prompt_char_limit or 8000
-  if (not conf.enable_step_by_step) or (length <= limit) then
+  local token_count = estimate_token_count(prompt)
+  local limit = conf.max_token or 2048
+  if (not conf.enable_step_by_step) or (token_count <= limit) then
     return { prompt }
   end
   return { prompts["step-prompt"] }
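A rough worked example of the new heuristic, assuming about 4 characters per token as in estimate_token_count above; the conf values here are illustrative, not taken from a real config.

local function estimate_token_count(text)
  return math.floor(#text / 4)
end

local conf = { enable_step_by_step = true, max_token = 2048 }  -- illustrative values
local prompt = string.rep("x", 10000)                          -- a 10,000-character prompt

local token_count = estimate_token_count(prompt)               -- 2500 estimated tokens
print(token_count, token_count <= (conf.max_token or 2048))    --> 2500  false
-- 2500 > 2048, so handle_step_by_step_if_needed would fall through to the step prompt.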
@@ -208,7 +213,6 @@ local function run_chatgpt_command()
   local bufnr = vim.api.nvim_create_buf(false, false)
   vim.api.nvim_buf_set_name(bufnr, "ChatGPT_Prompt.md")
   vim.api.nvim_buf_set_option(bufnr, "filetype", "markdown")
-  -- Set omnifunc for file name auto-completion
   vim.api.nvim_buf_set_option(bufnr, "omnifunc", "v:lua.chatgpt_file_complete")
   vim.api.nvim_buf_set_option(bufnr, "bufhidden", "wipe")
   vim.api.nvim_buf_set_option(bufnr, "buftype", "")
@@ -280,9 +284,7 @@ local function run_chatgpt_paste_command()
     return
   end

-  -- Check if we have tools
   if data.tools then
-    -- Must also verify project name
     if not data.project_name or data.project_name ~= conf.project_name then
       vim.api.nvim_err_writeln("Project name mismatch or missing. Aborting tool usage.")
       return
@@ -294,7 +296,6 @@ local function run_chatgpt_paste_command()
     return
   end

-  -- If we see project_name & files => older YAML style. We handle it but it's discouraged now.
   if data.project_name and data.files then
     if data.project_name ~= conf.project_name then
       vim.api.nvim_err_writeln("Project name mismatch. Aborting.")
@@ -361,7 +362,6 @@ local function run_chatgpt_paste_command()
       end
     end
   else
-    -- Not final => user is requesting more files
     local requested_paths = {}
     local root = vim.fn.getcwd()
     for _, fileinfo in ipairs(data.files) do
@@ -459,7 +459,6 @@ M.run_chatgpt_command = run_chatgpt_command
 M.run_chatgpt_paste_command = run_chatgpt_paste_command
 M.run_chatgpt_current_buffer_command = run_chatgpt_current_buffer_command

--- New: Global function for file name auto-completion in ChatGPT prompt
 function _G.chatgpt_file_complete(findstart, base)
   if findstart == 1 then
     local line = vim.fn.getline('.')