diff --git a/lua/chatgpt_nvim/init.lua b/lua/chatgpt_nvim/init.lua
index c05e987..39e78f9 100644
--- a/lua/chatgpt_nvim/init.lua
+++ b/lua/chatgpt_nvim/init.lua
@@ -62,105 +62,114 @@ local function is_directory(path)
   return stat and stat.type == "directory"
 end
 
---- Replaces the inline input() call with opening a new buffer for prompt input
+---------------------------------------------------------------------------
+-- Updated run_chatgpt_command using BufWriteCmd to avoid creating a file --
+---------------------------------------------------------------------------
 function M.run_chatgpt_command()
   local conf = config.load()
   if conf.debug then
     vim.api.nvim_out_write("[chatgpt_nvim:init] Running :ChatGPT command.\n")
   end
 
-  -- Create a new scratch buffer for user to type the message
-  local bufnr = vim.api.nvim_create_buf(false, true)
-  vim.api.nvim_buf_set_name(bufnr, "ChatGPT_Prompt")
-  vim.api.nvim_buf_set_option(bufnr, 'bufhidden', 'wipe')
-  vim.api.nvim_buf_set_option(bufnr, 'filetype', 'markdown')
+  -- Create a normal, listed buffer so :w / :wq will work
+  local bufnr = vim.api.nvim_create_buf(false, false)
+  -- Assign a filename so Vim treats it like a normal file, but we intercept writes
+  vim.api.nvim_buf_set_name(bufnr, "ChatGPT_Prompt.md")
+  vim.api.nvim_buf_set_option(bufnr, "filetype", "markdown")
+  vim.api.nvim_buf_set_option(bufnr, "bufhidden", "wipe")
+  vim.api.nvim_buf_set_option(bufnr, "buftype", "")
+  vim.api.nvim_buf_set_option(bufnr, "modifiable", true)
+
+  -- Set some initial placeholder lines
   vim.api.nvim_buf_set_lines(bufnr, 0, -1, false, {
     "# Enter your prompt below.",
     "",
     "Save & close with :wq to finalize your prompt."
   })
 
-  local function on_write_post()
-    -- Read buffer lines and join them into a single string
-    local lines = vim.api.nvim_buf_get_lines(bufnr, 0, -1, false)
-    -- Skip the first lines that are placeholders, if desired
-    local user_input = table.concat(lines, "\n")
+  -- Intercept the write so that no file is actually created on disk
+  vim.api.nvim_create_autocmd("BufWriteCmd", {
+    buffer = bufnr,
+    callback = function()
+      -- Gather lines
+      local lines = vim.api.nvim_buf_get_lines(bufnr, 0, -1, false)
+      local user_input = table.concat(lines, "\n")
 
-    if user_input == "" or user_input:find("^# Enter your prompt below.") then
-      print("No valid input provided.")
-      return
-    end
+      -- Basic check to ensure user actually wrote something
+      if user_input == "" or user_input:find("^# Enter your prompt below.") then
+        vim.api.nvim_out_write("No valid input provided.\n")
+        -- Mark buffer as unmodified, so :wq can still exit
+        vim.api.nvim_buf_set_option(bufnr, "modified", false)
+        return
+      end
 
-    -- Continue the same logic as originally, just using our new user_input
-    local dirs = conf.directories or {"."}
-    local project_structure = context.get_project_structure(dirs)
+      -- Build the prompt using the user_input
+      local dirs = conf.directories or {"."}
+      local project_structure = context.get_project_structure(dirs)
 
-    local initial_files = conf.initial_files or {}
-    local included_sections = {}
+      local initial_files = conf.initial_files or {}
+      local included_sections = {}
 
-    if #initial_files > 0 then
-      table.insert(included_sections, "\n\nIncluded files and directories (pre-selected):\n")
-      local root = vim.fn.getcwd()
-      for _, item in ipairs(initial_files) do
-        local full_path = root .. "/" .. item
-        if is_directory(full_path) then
-          local dir_files = context.get_project_files({item})
-          for _, f in ipairs(dir_files) do
-            local path = root .. "/" .. f
-            local data = read_file(path)
-            if data then
-              table.insert(included_sections, "\nFile: `" .. f .. "`\n```\n" .. data .. "\n```\n")
+      if #initial_files > 0 then
+        table.insert(included_sections, "\n\nIncluded files and directories (pre-selected):\n")
+        local root = vim.fn.getcwd()
+        for _, item in ipairs(initial_files) do
+          local full_path = root .. "/" .. item
+          if is_directory(full_path) then
+            local dir_files = context.get_project_files({item})
+            for _, f in ipairs(dir_files) do
+              local path = root .. "/" .. f
+              local data = read_file(path)
+              if data then
+                table.insert(included_sections, "\nFile: `" .. f .. "`\n```\n" .. data .. "\n```\n")
+              end
+            end
+          else
+            local data = read_file(full_path)
+            if data then
+              table.insert(included_sections, "\nFile: `" .. item .. "`\n```\n" .. data .. "\n```\n")
             end
-          end
-        else
-          local data = read_file(full_path)
-          if data then
-            table.insert(included_sections, "\nFile: `" .. item .. "`\n```\n" .. data .. "\n```\n")
           end
         end
       end
-    end
 
-    local initial_sections = {
-      "### Basic Prompt Instructions:\n",
-      conf.initial_prompt .. "\n\n\n",
-      "### User Instructions:\n",
-      user_input .. "\n\n\n",
-      "### Context/Data:\n",
-      "Project name: " .. (conf.project_name or "") .. "\n",
-      "Project Structure:\n",
-      project_structure,
-      table.concat(included_sections, "\n")
-    }
+      local initial_sections = {
+        "### Basic Prompt Instructions:\n",
+        conf.initial_prompt .. "\n\n\n",
+        "### User Instructions:\n",
+        user_input .. "\n\n\n",
+        "### Context/Data:\n",
+        "Project name: " .. (conf.project_name or "") .. "\n",
+        "Project Structure:\n",
+        project_structure,
+        table.concat(included_sections, "\n")
+      }
 
-    local prompt = table.concat(initial_sections, "\n")
+      local prompt = table.concat(initial_sections, "\n")
 
-    local token_limit = conf.token_limit or 8000
-    local token_count = estimate_tokens(prompt)
+      local token_limit = conf.token_limit or 8000
+      local token_count = estimate_tokens(prompt)
 
-    if conf.debug then
-      vim.api.nvim_out_write("[chatgpt_nvim:init] Prompt token count: " .. token_count .. "\n")
-    end
+      if conf.debug then
+        vim.api.nvim_out_write("[chatgpt_nvim:init] Prompt token count: " .. token_count .. "\n")
+      end
 
-    if token_count > token_limit then
-      print("Too many files in project structure. The request exceeds the O1 model limit of " .. token_limit .. " tokens.")
-      return
-    end
+      if token_count > token_limit then
+        vim.api.nvim_out_write("Too many files in project structure. The request exceeds the O1 model limit of " .. token_limit .. " tokens.\n")
+      else
+        copy_to_clipboard(prompt)
+        vim.api.nvim_out_write("Prompt (requesting needed files) copied to clipboard! Paste it into the ChatGPT O1 model.\n")
+      end
 
-    copy_to_clipboard(prompt)
-    print("Prompt (requesting needed files) copied to clipboard! Paste it into the ChatGPT O1 model.")
-  end
-
-  -- Create an autocmd that triggers once when user saves the buffer (BufWritePost)
-  vim.api.nvim_create_autocmd("BufWritePost", {
-    buffer = bufnr,
-    once = true,
-    callback = on_write_post
+      -- Mark as unmodified so :wq won't complain
+      vim.api.nvim_buf_set_option(bufnr, "modified", false)
+    end,
   })
 
   -- Switch to the newly created buffer
   vim.cmd("buffer " .. bufnr)
 end
 
+---------------------------------------------------------------------------
 function M.run_chatgpt_paste_command()
   local conf = config.load()