feat: make the workflow more interactive

This commit is contained in:
2024-12-13 23:45:05 +01:00
parent cc37c8505c
commit f2c6f60d03

View File

@@ -37,6 +37,30 @@ local function is_subpath(root, path)
return target_abs:sub(1, #root_abs) == root_abs
end
-- The improved workflow:
--
-- :ChatGPT:
-- 1) Create a prompt that includes:
-- - The project's name and structure, but NOT file contents.
-- - The initial instructions including that the O1 Model should respond with a YAML listing which files are needed.
-- 2) Copy that prompt to clipboard.
--
-- The O1 Model responds with a YAML listing files it needs.
--
-- :ChatGPTPaste:
-- 1) Parse the YAML from clipboard.
-- 2) If the YAML only lists files that are needed (no final changes), generate a new prompt that includes the requested files' content, plus a reminder that the O1 Model can ask for more files.
-- 3) Copy that prompt to clipboard for O1 Model.
--
-- The O1 Model can continue to ask for more files. Each time we run :ChatGPTPaste with the new YAML, we provide more files. Eventually, the O1 Model returns the final YAML with `project_name` and `files` changes.
--
-- When the final YAML arrives with `files` changes and its `project_name` matches the current project, we apply the changes.
--
-- If more context is needed, we ask user outside the YAML.
-- We'll store requested files in a module-local variable for now, though a more robust solution might be needed.
local requested_files = {}
function M.run_chatgpt_command()
local conf = config.load()
local user_input = vim.fn.input("Message for O1 Model: ")
@@ -47,31 +71,37 @@ function M.run_chatgpt_command()
local dirs = conf.directories or {"."}
local project_structure = context.get_project_structure(dirs)
local all_files = context.get_project_files(dirs)
local file_sections = context.get_file_contents(all_files)
local sections = {
-- Initial prompt without file contents, asking the O1 model which files are needed.
-- We'll instruct the O1 Model to respond with a YAML that includes the files it needs.
-- Format of the O1 model response should be something like:
-- project_name: <project_name>
-- files:
-- - path: "path/to/needed_file"
--
-- No changes yet, just asking for which files the model wants.
local initial_sections = {
conf.initial_prompt .. "\n" .. user_input,
"\n\nProject name: " .. (conf.project_name or "") .. "\n",
"\n\nProject Structure:\n",
project_structure,
"\n\nBelow are the files from the project, each preceded by its filename in backticks and enclosed in triple backticks.\n"
"\n\nPlease respond with a YAML listing which files you need from the project. For example:\n",
"project_name: " .. (conf.project_name or "") .. "\nfiles:\n - path: \"relative/path/to/file\"\n\n"
}
table.insert(sections, file_sections)
local prompt = table.concat(sections, "\n")
local prompt = table.concat(initial_sections, "\n")
local token_limit = conf.token_limit or 8000
local token_count = estimate_tokens(prompt)
if token_count > token_limit then
print("Too many files attached. The request exceeds the O1 model limit of " .. token_limit .. " tokens.")
print("Too many files in project structure. The request exceeds the O1 model limit of " .. token_limit .. " tokens.")
return
end
copy_to_clipboard(prompt)
print("Prompt copied to clipboard! Paste it into the ChatGPT O1 model.")
print("Prompt (requesting needed files) copied to clipboard! Paste it into the ChatGPT O1 model.")
end
function M.run_chatgpt_paste_command()
@@ -84,12 +114,27 @@ function M.run_chatgpt_paste_command()
end
local data = parse_response(raw)
if not data or not data.files then
vim.api.nvim_err_writeln("No 'files' field found in the YAML response.")
if not data then
return
end
if not data.project_name or data.project_name ~= conf.project_name then
-- Check if this is the final answer (with modifications) or just requesting more files.
if data.project_name and data.files then
-- The O1 model provided a YAML with files. We must check if these files contain content to apply changes.
-- If 'delete: true' or 'content' is found, that means it's the final set of changes.
-- If only paths are listed without 'content' or 'delete', that means the model is asking for files.
local is_final = false
for _, fileinfo in ipairs(data.files) do
if fileinfo.content or fileinfo.delete == true then
is_final = true
break
end
end
if is_final then
-- Final changes: apply them
if data.project_name ~= conf.project_name then
vim.api.nvim_err_writeln("Project name mismatch. The provided changes are for project '" ..
(data.project_name or "unknown") .. "' but current project is '" ..
(conf.project_name or "unconfigured") .. "'. Aborting changes.")
@@ -117,10 +162,70 @@ function M.run_chatgpt_paste_command()
handler.write_file(fileinfo.path, fileinfo.content)
print("Wrote file: " .. fileinfo.path)
else
vim.api.nvim_err_writeln("Invalid file entry. Must have 'content' or 'delete' set to true.")
vim.api.nvim_err_writeln("Invalid file entry. Must have 'content' or 'delete' set to true for final changes.")
end
::continue::
end
return
else
-- Not final: the model is requesting these files. We must gather and send them.
local dirs = conf.directories or {"."}
local all_files = context.get_project_files(dirs)
local requested_paths = {}
for _, fileinfo in ipairs(data.files) do
if fileinfo.path then
table.insert(requested_paths, fileinfo.path)
end
end
local file_sections = {}
local root = vim.fn.getcwd()
for _, f in ipairs(requested_paths) do
local full_path = root .. "/" .. f
local fd = vim.loop.fs_open(full_path, "r", 438)
if fd then
local stat = vim.loop.fs_fstat(fd)
if stat then
local content = vim.loop.fs_read(fd, stat.size, 0)
vim.loop.fs_close(fd)
if content then
table.insert(file_sections, "\nFile: `" .. f .. "`\n```\n" .. content .. "\n```\n")
end
else
vim.loop.fs_close(fd)
end
else
table.insert(file_sections, "\nFile: `" .. f .. "`\n```\n(Could not read file)\n```\n")
end
end
-- Create a new prompt including the requested files plus a reminder that the model can ask for more.
local sections = {
conf.initial_prompt,
"\n\nProject name: " .. (conf.project_name or ""),
"\n\nBelow are the requested files from the project, each preceded by its filename in backticks and enclosed in triple backticks.\n",
table.concat(file_sections, "\n"),
"\n\nIf you need more files, please respond again in YAML listing additional files. If you have all information you need, provide the final YAML with `project_name` and `files` (with `content` or `delete`) to apply changes.\n"
}
local prompt = table.concat(sections, "\n")
local token_limit = conf.token_limit or 8000
local token_count = estimate_tokens(prompt)
if token_count > token_limit then
vim.api.nvim_err_writeln("Too many requested files. Exceeds token limit.")
return
end
copy_to_clipboard(prompt)
print("Prompt (with requested files) copied to clipboard! Paste it into the ChatGPT O1 model.")
end
else
vim.api.nvim_err_writeln("Invalid response. Expected 'project_name' and 'files'.")
end
end
return M