feat: add a token limit and make it configurable
This commit is contained in:
@@ -3,6 +3,7 @@
|
||||
-- 1) Determine the Git root based on the currently opened file.
|
||||
-- 2) If no file is open or not in Git repo, fallback to current working directory.
|
||||
-- 3) Add support for configuring a list of default prompt blocks ("go-development", "typo3-development", "basic-prompt") that can override the initial prompt if provided.
|
||||
-- 4) Add support for configuring a token limit.
|
||||
|
||||
local M = {}
|
||||
local uv = vim.loop
|
||||
@@ -57,7 +58,8 @@ function M.load()
|
||||
local config = {
|
||||
initial_prompt = "",
|
||||
directories = { "." },
|
||||
default_prompt_blocks = {}
|
||||
default_prompt_blocks = {},
|
||||
token_limit = 128000
|
||||
}
|
||||
|
||||
if fd then
|
||||
@@ -76,6 +78,9 @@ function M.load()
|
||||
if type(result.default_prompt_blocks) == "table" then
|
||||
config.default_prompt_blocks = result.default_prompt_blocks
|
||||
end
|
||||
if type(result.token_limit) == "number" then
|
||||
config.token_limit = result.token_limit
|
||||
end
|
||||
end
|
||||
end
|
||||
else
|
||||
|
||||
@@ -24,6 +24,13 @@ local function parse_response(raw)
|
||||
return data
|
||||
end
|
||||
|
||||
-- Rough token estimate for a prompt string.
-- Heuristic: on average one token spans about 4 characters of text,
-- so the byte length divided by 4 (rounded down) approximates the
-- token count. This is an estimate only, not a real tokenizer.
local function estimate_tokens(text)
  local chars_per_token = 4
  return math.floor(#text / chars_per_token)
end
|
||||
|
||||
function M.run_chatgpt_command()
|
||||
local conf = config.load()
|
||||
local user_input = vim.fn.input("Message for O1 Model: ")
|
||||
@@ -47,6 +54,15 @@ function M.run_chatgpt_command()
|
||||
table.insert(sections, file_sections)
|
||||
|
||||
local prompt = table.concat(sections, "\n")
|
||||
|
||||
local token_limit = conf.token_limit or 8000
|
||||
local token_count = estimate_tokens(prompt)
|
||||
|
||||
if token_count > token_limit then
|
||||
print("Too many files attached. The request exceeds the O1 model limit of " .. token_limit .. " tokens.")
|
||||
return
|
||||
end
|
||||
|
||||
copy_to_clipboard(prompt)
|
||||
print("Prompt copied to clipboard! Paste it into the ChatGPT O1 model.")
|
||||
end
|
||||
|
||||
Reference in New Issue
Block a user