-- chatgpt.vim/lua/chatgpt_nvim/config.lua
local M = {}
local uv = vim.loop
-- lyaml is an optional dependency; without it the YAML config file is ignored
-- and the defaults defined in M.load() are used.
local ok_yaml, lyaml = pcall(require, "lyaml")
-- Reusable system-prompt fragments that can be selected via the
-- `default_prompt_blocks` list in .chatgpt_config.yaml.
local prompt_blocks = {
  ["go-development"] = [[
You are a coding assistant specialized in Go development.
You will receive a project's context and user instructions related to Go code.
Keep your suggestions aligned with Go best practices and idiomatic Go.
]],
  ["typo3-development"] = [[
You are a coding assistant specialized in TYPO3 development.
You have access to the project's context and the user's instructions.
Your answers should focus on TYPO3 coding guidelines, extension development best practices,
and TSconfig or TypoScript recommendations.
]],
  ["rust-development"] = [[
You are a coding assistant specialized in Rust development.
You will receive a project's context and user instructions related to Rust code.
Keep your suggestions aligned with Rust best practices and idiomatic Rust.
]],
  ["basic-prompt"] = [[
You are a coding assistant who receives a project's context and user instructions.
The user will provide a prompt, and you will guide them through a workflow:
1. Analyse which files you need. Ask for file contents in YAML if needed.
2. Request additional context outside YAML if necessary.
3. When ready, provide final changes in YAML with:
   - 'project_name'
   - 'files', each having:
     * 'path'
     * 'diff' (for patching an existing file) OR 'content' (for a new file) OR 'delete'
Important: do not provide entire file content for updates; instead provide a unified diff in 'diff'.
Only modify or delete files whose contents you have explicitly requested and seen beforehand.
]],
  ["secure-coding"] = [[
You are a coding assistant specialized in secure software development.
Always consider security impacts. Use diffs for updates, new content for new files,
and 'delete: true' for removals.
]],
  ["workflow-prompt"] = [[
You are a coding assistant focusing on making the Neovim ChatGPT workflow straightforward and user-friendly.
Remind the user to:
- List needed files for further context
- Request additional information outside YAML if needed
- Provide final changes in YAML with 'project_name' and 'files', using:
  * 'diff' for existing file modifications
  * 'content' for new files
  * 'delete: true' for file deletions
]]
}
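-- Blocks are referenced by name from the config file and appended to
-- `initial_prompt` in the order listed; unknown names are silently skipped.
-- Illustrative YAML snippet (field names taken from M.load() below):
--
--   default_prompt_blocks:
--     - basic-prompt
--     - secure-coding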
-- Resolve the project root: the Git top-level directory of the current file,
-- falling back to the file's directory or the current working directory.
local function get_project_root()
  local current_file = vim.fn.expand("%:p")
  local root_dir
  if current_file == "" then
    root_dir = vim.fn.getcwd()
  else
    local file_dir = current_file:match("(.*)/")
    if not file_dir then
      root_dir = vim.fn.getcwd()
    else
      local cmd = string.format("cd %s && git rev-parse --show-toplevel 2>/dev/null", vim.fn.shellescape(file_dir))
      local git_root = vim.fn.systemlist(cmd)
      if vim.v.shell_error == 0 and git_root and #git_root > 0 then
        root_dir = git_root[1]
      else
        root_dir = file_dir
      end
    end
  end
  return root_dir
end
-- The config file is expected at the project root.
local function get_config_path()
  local root = get_project_root()
  return root .. "/.chatgpt_config.yaml"
end
-- Load the plugin configuration. Values from .chatgpt_config.yaml override the
-- defaults below; unknown keys and values of the wrong type are ignored.
function M.load()
  local path = get_config_path()
  local fd = uv.fs_open(path, "r", 438)
  local config = {
    initial_prompt = "",
    directories = { "." },
    default_prompt_blocks = {},
    token_limit = 16384,
    project_name = "",
    debug = false,
    initial_files = {},
    preview_changes = false,
    interactive_file_selection = false,
    partial_acceptance = false,
    improved_debug = false,
    enable_chunking = false,
    enable_step_by_step = true,
    enable_debug_commands = false
  }
  if fd then
    local stat = uv.fs_fstat(fd)
    local data = uv.fs_read(fd, stat.size, 0)
    uv.fs_close(fd)
    if data and ok_yaml then
      local ok, result = pcall(lyaml.load, data)
      if ok and type(result) == "table" then
        -- Only accept values of the expected type; anything else keeps its default.
        if type(result.initial_prompt) == "string" then
          config.initial_prompt = result.initial_prompt
        end
        if type(result.directories) == "table" then
          config.directories = result.directories
        end
        if type(result.default_prompt_blocks) == "table" then
          config.default_prompt_blocks = result.default_prompt_blocks
        end
        if type(result.token_limit) == "number" then
          config.token_limit = result.token_limit
        end
        if type(result.project_name) == "string" then
          config.project_name = result.project_name
        end
        if type(result.debug) == "boolean" then
          config.debug = result.debug
        end
        if type(result.initial_files) == "table" then
          config.initial_files = result.initial_files
        end
        if type(result.preview_changes) == "boolean" then
          config.preview_changes = result.preview_changes
        end
        if type(result.interactive_file_selection) == "boolean" then
          config.interactive_file_selection = result.interactive_file_selection
        end
        if type(result.partial_acceptance) == "boolean" then
          config.partial_acceptance = result.partial_acceptance
        end
        if type(result.improved_debug) == "boolean" then
          config.improved_debug = result.improved_debug
        end
        if type(result.enable_chunking) == "boolean" then
          config.enable_chunking = result.enable_chunking
        end
        if type(result.enable_step_by_step) == "boolean" then
          config.enable_step_by_step = result.enable_step_by_step
        end
        if type(result.enable_debug_commands) == "boolean" then
          config.enable_debug_commands = result.enable_debug_commands
        end
      end
    end
  else
    -- No config file found: fall back to a generic initial prompt.
    config.initial_prompt = "You are a coding assistant who receives a project's context and user instructions..."
  end
  -- Append any selected prompt blocks after the initial prompt.
  if type(config.default_prompt_blocks) == "table" and #config.default_prompt_blocks > 0 then
    local merged_prompt = {}
    for _, block_name in ipairs(config.default_prompt_blocks) do
      if prompt_blocks[block_name] then
        table.insert(merged_prompt, prompt_blocks[block_name])
      end
    end
    if #merged_prompt > 0 then
      local combined_blocks = table.concat(merged_prompt, "\n\n")
      if config.initial_prompt ~= "" then
        config.initial_prompt = config.initial_prompt .. "\n\n" .. combined_blocks
      else
        config.initial_prompt = combined_blocks
      end
    end
  end
  if config.debug then
    vim.api.nvim_out_write("[chatgpt_nvim:config] Loaded config from: " .. path .. "\n")
    vim.api.nvim_out_write("[chatgpt_nvim:config] Debug logging is enabled.\n")
  end
  return config
end
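-- Minimal usage sketch (module path assumed from the plugin layout above):
--
--   local config = require("chatgpt_nvim.config").load()
--   print(config.project_name, config.token_limit)
--
-- load() falls back to the defaults when the config file is missing, lyaml is
-- not installed, or the YAML cannot be parsed.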
return M