-- lua/chatgpt_nvim/init.lua
|
|
local M = {}
|
|
|
|
local context = require('chatgpt_nvim.context')
|
|
local handler = require('chatgpt_nvim.handler')
|
|
local config = require('chatgpt_nvim.config')
|
|
|
|
local ok_yaml, lyaml = pcall(require, "lyaml")
|
|
|
|
-- Put `text` on the system clipboard by writing the '+' register.
local function copy_to_clipboard(text)
  vim.fn.setreg('+', text)
end
|
|
|
|
-- Parse a raw YAML string (the model's clipboard reply) into a Lua table.
-- Returns the decoded table, or nil after printing an error when lyaml is
-- missing or the text is not valid YAML.
local function parse_response(raw)
  if not ok_yaml then
    vim.api.nvim_err_writeln("lyaml not available. Install with `luarocks install lyaml`.")
    return nil
  end
  local ok, data = pcall(lyaml.load, raw)
  if not ok then
    -- Surface the parser's own message so the user can see what is malformed,
    -- instead of discarding it as the previous version did.
    vim.api.nvim_err_writeln("Failed to parse YAML response: " .. tostring(data))
    return nil
  end
  if not data then
    -- lyaml returned nothing (e.g. an empty document).
    vim.api.nvim_err_writeln("Failed to parse YAML response.")
    return nil
  end
  return data
end
|
|
|
|
-- Rough token count for `text`, assuming ~4 characters per token on average.
-- Used only to keep prompts under the configured model limit; precision is
-- not required.
local function estimate_tokens(text)
  local CHARS_PER_TOKEN = 4
  return math.floor(#text / CHARS_PER_TOKEN)
end
|
|
|
|
-- Return true when `path` resolves to a location inside `root`.
-- Both arguments are expanded to absolute form with fnamemodify(":p")
-- before a prefix comparison.
local function is_subpath(root, path)
  local root_abs = vim.fn.fnamemodify(root, ":p")
  -- ":p" only appends a trailing separator when the directory exists.
  -- Force one so a sibling such as "/proj-extra" can never match the
  -- prefix of "/proj". NOTE(review): assumes "/" separators — confirm
  -- behavior on Windows if that platform matters.
  if root_abs:sub(-1) ~= "/" then
    root_abs = root_abs .. "/"
  end
  local target_abs = vim.fn.fnamemodify(path, ":p")
  return target_abs:sub(1, #root_abs) == root_abs
end
|
|
|
|
-- The improved workflow:
|
|
--
|
|
-- :ChatGPT:
|
|
-- 1) Create a prompt that includes:
|
|
-- - The project's name and structure, but NOT file contents.
|
|
-- - The initial instructions including that the O1 Model should respond with a YAML listing which files are needed.
|
|
-- 2) Copy that prompt to clipboard.
|
|
--
|
|
-- The O1 Model responds with a YAML listing files it needs.
|
|
--
|
|
-- :ChatGPTPaste:
|
|
-- 1) Parse the YAML from clipboard.
|
|
-- 2) If the YAML lists files needed (no final changes), generate a new prompt that includes the requested files content plus a reminder that O1 Model can ask for more files.
|
|
-- 3) Copy that prompt to clipboard for O1 Model.
|
|
--
|
|
-- The O1 Model can continue to ask for more files. Each time we run :ChatGPTPaste with the new YAML, we provide more files. Eventually, the O1 Model returns the final YAML with `project_name` and `files` changes.
|
|
--
|
|
-- When final YAML with `files` and `project_name` matches current project, we apply the changes.
|
|
--
|
|
-- If more context is needed, we ask user outside the YAML.
|
|
|
|
-- We'll store requested files in a global variable for now, though a more robust solution might be needed.
-- NOTE(review): this table is never read or written anywhere in this file —
-- presumably a leftover from an earlier iteration. Confirm before removing.
local requested_files = {}
|
|
|
|
-- :ChatGPT entry point.
-- Builds the opening prompt for the O1 model: the user's message, the
-- project name, and the directory structure (no file contents), plus an
-- instruction to reply with a YAML listing of the files it needs.
-- The prompt is copied to the clipboard if it fits the token limit.
function M.run_chatgpt_command()
  local cfg = config.load()

  local message = vim.fn.input("Message for O1 Model: ")
  if message == "" then
    print("No input provided.")
    return
  end

  local structure = context.get_project_structure(cfg.directories or {"."})
  local name = cfg.project_name or ""

  -- The model is expected to answer with a request-only YAML of the form:
  --   project_name: <project_name>
  --   files:
  --    - path: "path/to/needed_file"
  -- i.e. just the files it wants to see — no changes yet.
  local prompt = table.concat({
    cfg.initial_prompt .. "\n" .. message,
    "\n\nProject name: " .. name .. "\n",
    "\n\nProject Structure:\n",
    structure,
    "\n\nPlease respond with a YAML listing which files you need from the project. For example:\n",
    "project_name: " .. name .. "\nfiles:\n - path: \"relative/path/to/file\"\n\n"
  }, "\n")

  local limit = cfg.token_limit or 8000
  if estimate_tokens(prompt) > limit then
    print("Too many files in project structure. The request exceeds the O1 model limit of " .. limit .. " tokens.")
    return
  end

  copy_to_clipboard(prompt)
  print("Prompt (requesting needed files) copied to clipboard! Paste it into the ChatGPT O1 model.")
end
|
|
|
|
-- :ChatGPTPaste entry point.
-- Reads the O1 model's YAML reply from the clipboard and either:
--   * applies final changes (entries carrying `content` or `delete: true`),
--     after verifying the project name matches and every path stays inside
--     the project root, or
--   * treats the reply as a request for more files: reads each requested
--     file and copies a follow-up prompt (with file contents) to the
--     clipboard.
function M.run_chatgpt_paste_command()
  print("Reading ChatGPT YAML response from clipboard...")
  local conf = config.load()
  local raw = handler.get_clipboard_content()
  if raw == "" then
    vim.api.nvim_err_writeln("Clipboard is empty. Please copy the YAML response from ChatGPT first.")
    return
  end

  local data = parse_response(raw)
  if not data then
    return
  end

  if not (data.project_name and data.files) then
    vim.api.nvim_err_writeln("Invalid response. Expected 'project_name' and 'files'.")
    return
  end

  -- Any entry with 'content' or 'delete: true' marks the final change set;
  -- entries with only 'path' mean the model is still requesting files.
  local is_final = false
  for _, fileinfo in ipairs(data.files) do
    if fileinfo.content or fileinfo.delete == true then
      is_final = true
      break
    end
  end

  local root = vim.fn.getcwd()

  if is_final then
    -- Refuse to apply changes intended for a different project.
    if data.project_name ~= conf.project_name then
      vim.api.nvim_err_writeln("Project name mismatch. The provided changes are for project '" ..
        (data.project_name or "unknown") .. "' but current project is '" ..
        (conf.project_name or "unconfigured") .. "'. Aborting changes.")
      return
    end

    for _, fileinfo in ipairs(data.files) do
      if not fileinfo.path then
        vim.api.nvim_err_writeln("Invalid file entry. Must have 'path'.")
        goto continue
      end
      -- Never write or delete outside the project root.
      if not is_subpath(root, fileinfo.path) then
        vim.api.nvim_err_writeln("Invalid file path outside project root: " .. fileinfo.path)
        goto continue
      end
      if fileinfo.delete == true then
        handler.delete_file(fileinfo.path)
        print("Deleted file: " .. fileinfo.path)
      elseif fileinfo.content then
        handler.write_file(fileinfo.path, fileinfo.content)
        print("Wrote file: " .. fileinfo.path)
      else
        vim.api.nvim_err_writeln("Invalid file entry. Must have 'content' or 'delete' set to true for final changes.")
      end
      ::continue::
    end
    return
  end

  -- Not final: the model is requesting these files. Gather their contents.
  -- (The previous version also fetched context.get_project_files here, but
  -- the result was never used — removed.)
  local requested_paths = {}
  for _, fileinfo in ipairs(data.files) do
    if fileinfo.path then
      table.insert(requested_paths, fileinfo.path)
    end
  end

  local file_sections = {}
  for _, f in ipairs(requested_paths) do
    -- Guard reads the same way writes are guarded above: a malicious or
    -- confused response must not be able to exfiltrate files outside the
    -- project root (e.g. via "../../etc/passwd").
    if not is_subpath(root, f) then
      vim.api.nvim_err_writeln("Refusing to read file outside project root: " .. f)
      table.insert(file_sections, "\nFile: `" .. f .. "`\n```\n(Could not read file)\n```\n")
      goto next_file
    end
    do
      local full_path = root .. "/" .. f
      local fd = vim.loop.fs_open(full_path, "r", 438)  -- 438 == 0666
      if fd then
        local stat = vim.loop.fs_fstat(fd)
        local content = stat and vim.loop.fs_read(fd, stat.size, 0) or nil
        vim.loop.fs_close(fd)
        if content then
          table.insert(file_sections, "\nFile: `" .. f .. "`\n```\n" .. content .. "\n```\n")
        end
      else
        table.insert(file_sections, "\nFile: `" .. f .. "`\n```\n(Could not read file)\n```\n")
      end
    end
    ::next_file::
  end

  -- New prompt: the requested files plus a reminder that the model may ask
  -- for more, or finish with the final change YAML.
  local sections = {
    conf.initial_prompt,
    "\n\nProject name: " .. (conf.project_name or ""),
    "\n\nBelow are the requested files from the project, each preceded by its filename in backticks and enclosed in triple backticks.\n",
    table.concat(file_sections, "\n"),
    "\n\nIf you need more files, please respond again in YAML listing additional files. If you have all information you need, provide the final YAML with `project_name` and `files` (with `content` or `delete`) to apply changes.\n"
  }

  local prompt = table.concat(sections, "\n")

  local token_limit = conf.token_limit or 8000
  if estimate_tokens(prompt) > token_limit then
    vim.api.nvim_err_writeln("Too many requested files. Exceeds token limit.")
    return
  end

  copy_to_clipboard(prompt)
  print("Prompt (with requested files) copied to clipboard! Paste it into the ChatGPT O1 model.")
end
|
|
|
|
return M
|