From 162ab2d82054897ac0d371d7047811abcd510ab5 Mon Sep 17 00:00:00 2001
From: Dominik Polakovics
Date: Sun, 29 Dec 2024 16:39:58 +0100
Subject: [PATCH] fix: buffer handling

---
 README.md                 | 11 +++++-----
 lua/chatgpt_nvim/init.lua | 46 +++++++++++++++++++++++++++------------
 lua/chatgpt_nvim/ui.lua   | 30 +++++++++++++++++++------
 3 files changed, 61 insertions(+), 26 deletions(-)

diff --git a/README.md b/README.md
index bcd00ae..7e5f3d6 100644
--- a/README.md
+++ b/README.md
@@ -43,15 +43,15 @@ token_limit: 3000
 ## Usage
 
 1. **`:ChatGPT`**
-   - If `interactive_file_selection` is on, you’ll pick directories to include.
-   - A buffer `ChatGPT_Prompt.md` opens to type your instructions.
-   - Save & close with `:wq` → If `enable_chunking` is on and the prompt exceeds `token_limit`, it’s split into multiple chunks. Each chunk is opened in a buffer, and the first one is copied to your clipboard.
+   - If `interactive_file_selection` is on, you’ll pick directories to include in a buffer named `ChatGPT_File_Selection`.
+   - Save & close with `:wq`, `:x`, or `:bd` (you don’t have to use `:q`).
+   - If `enable_chunking` is on and the prompt exceeds `token_limit`, it’s split into multiple buffers for you to copy/paste.
 
 2. **Paste Prompt to ChatGPT**
    - If multiple chunks exist, copy/paste them one by one in ChatGPT.
 
 3. **`:ChatGPTPaste`**
-   - The plugin reads the YAML from your clipboard. If it requests more files, the plugin might chunk that request too if large.
+   - The plugin reads the YAML from your clipboard. If it requests more files, it might chunk that request, too.
    - If final changes are provided:
      - Optionally preview them (`preview_changes`).
      - Optionally partially accept them (`partial_acceptance`).
@@ -62,5 +62,6 @@ token_limit: 3000
 - If partial acceptance is confusing, remember to remove or prepend `#` to lines you don’t want before saving and closing the buffer.
 - If chunking occurs, ensure you copy/paste **all chunks** into ChatGPT in the correct order.
 - Check `ChatGPT_Debug_Log` if `improved_debug` is on, or the Neovim messages if `debug` is on, for detailed info.
+- You can close the selection or prompt buffers at any time with commands like `:bd`, `:x`, or `:wq`. No need to rely on `:q`.
 
-Enjoy your improved, more flexible ChatGPT Neovim plugin with chunking support!
+Enjoy your improved, more flexible ChatGPT Neovim plugin with chunking support!
\ No newline at end of file
diff --git a/lua/chatgpt_nvim/init.lua b/lua/chatgpt_nvim/init.lua
index 282d69c..7a84faa 100644
--- a/lua/chatgpt_nvim/init.lua
+++ b/lua/chatgpt_nvim/init.lua
@@ -80,11 +80,6 @@ end
 -----------------------------------------------------------------------------
 -- CHUNKING
 -----------------------------------------------------------------------------
--- If chunking is enabled and we exceed the token limit, we split the prompt
--- into multiple chunks. We then copy each chunk in turn to the clipboard.
--- The user can paste them one by one into ChatGPT. This is a naive approach,
--- but helps with extremely large requests.
------------------------------------------------------------------------------
 local function handle_chunking_if_needed(prompt, estimate_fn)
   local conf = config.load()
   local token_count = estimate_fn(prompt)
@@ -96,14 +91,26 @@ local function handle_chunking_if_needed(prompt, estimate_fn)
   return chunks
 end
 
+-- Close an existing buffer by name (if it exists)
+local function close_existing_buffer_by_name(pattern)
+  for _, b in ipairs(vim.api.nvim_list_bufs()) do
+    local name = vim.api.nvim_buf_get_name(b)
+    if name:match(pattern) then
+      vim.api.nvim_buf_delete(b, { force = true })
+    end
+  end
+end
+
 -- Show the user a preview buffer with the proposed changes (unchanged).
 local function preview_changes(changes)
+  close_existing_buffer_by_name("ChatGPT_Changes_Preview$")
+
   local bufnr = vim.api.nvim_create_buf(false, true)
   vim.api.nvim_buf_set_name(bufnr, "ChatGPT_Changes_Preview")
   vim.api.nvim_buf_set_option(bufnr, "filetype", "diff")
   vim.api.nvim_buf_set_lines(bufnr, 0, -1, false, {
     "# Preview of Changes:",
-    "# (Close this window to apply changes or run :q to cancel)",
+    "# (Close this window to apply changes or use :q to cancel)",
     ""
   })
   for _, fileinfo in ipairs(changes) do
@@ -126,12 +133,14 @@ end
 
 -- Minimal partial acceptance from previous example
 local function partial_accept(changes)
+  close_existing_buffer_by_name("ChatGPT_Partial_Accept$")
+
   local bufnr = vim.api.nvim_create_buf(false, true)
   vim.api.nvim_buf_set_name(bufnr, "ChatGPT_Partial_Accept")
   vim.api.nvim_buf_set_option(bufnr, "filetype", "diff")
 
   local lines = {
-    "# Remove or comment out (prepend '#') any changes you do NOT want, then :wq to finalize partial acceptance",
+    "# Remove or comment out (prepend '#') any changes you do NOT want, then :wq, :x, or :bd to finalize partial acceptance",
     ""
   }
   for _, fileinfo in ipairs(changes) do
@@ -202,7 +211,10 @@ local function partial_accept(changes)
   vim.api.nvim_create_autocmd("BufWriteCmd", {
     buffer = bufnr,
     once = true,
-    callback = on_write
+    callback = function()
+      on_write()
+      vim.cmd("bd! " .. bufnr) -- auto-close buffer
+    end
   })
 
   vim.cmd("split")
@@ -224,6 +236,8 @@ end
 
 -- Utility to store generated prompt in a scratch buffer
 local function store_prompt_for_reference(prompt)
+  close_existing_buffer_by_name("ChatGPT_Generated_Prompt$")
+
   local bufnr = vim.api.nvim_create_buf(false, true)
   vim.api.nvim_buf_set_name(bufnr, "ChatGPT_Generated_Prompt")
   vim.api.nvim_buf_set_option(bufnr, "filetype", "markdown")
@@ -258,6 +272,9 @@ function M.run_chatgpt_command()
     end
   end
 
+  -- Close existing prompt buffer if open
+  close_existing_buffer_by_name("ChatGPT_Prompt.md$")
+
   local bufnr = vim.api.nvim_create_buf(false, false)
   vim.api.nvim_buf_set_name(bufnr, "ChatGPT_Prompt.md")
   vim.api.nvim_buf_set_option(bufnr, "filetype", "markdown")
@@ -268,7 +285,7 @@ function M.run_chatgpt_command()
   vim.api.nvim_buf_set_lines(bufnr, 0, -1, false, {
     "# Enter your prompt below.",
     "",
-    "Save & close with :wq to finalize your prompt."
+    "Save & close with :wq, :x, or :bd to finalize your prompt."
   })
 
   vim.api.nvim_create_autocmd("BufWriteCmd", {
@@ -325,19 +342,20 @@ function M.run_chatgpt_command()
     local chunks = handle_chunking_if_needed(prompt, estimate_fn)
 
     if #chunks == 1 then
-      -- Single chunk, just copy as normal
      copy_to_clipboard(chunks[1])
      vim.api.nvim_out_write("Prompt copied to clipboard! Paste into ChatGPT.\n")
    else
-      -- Multiple chunks. We'll store them in separate scratch buffers and also copy the first chunk
+      -- Multiple chunks. We'll create a separate scratch buffer for each one.
       for i, chunk in ipairs(chunks) do
+        close_existing_buffer_by_name("ChatGPT_Generated_Chunk_" .. i .. "$")
+
         local cbuf = vim.api.nvim_create_buf(false, true)
         vim.api.nvim_buf_set_name(cbuf, "ChatGPT_Generated_Chunk_" .. i)
         vim.api.nvim_buf_set_option(cbuf, "filetype", "markdown")
 
         local lines = {
           "# Chunk " .. i .. " of " .. #chunks .. ":",
-          "# Copy/paste this chunk into ChatGPT, then come back and copy next chunk as needed.",
+          "# Copy/paste this chunk into ChatGPT, then come back and copy the next chunk as needed.",
           ""
         }
         vim.list_extend(lines, vim.split(chunk, "\n"))
@@ -393,7 +411,7 @@ function M.run_chatgpt_paste_command()
   if is_final then
     if conf.preview_changes then
       preview_changes(data.files)
-      print("Close the preview window to apply changes, or leave it open and use :q to cancel.")
+      print("Close the preview window to apply changes, or use :q to cancel.")
       local closed = vim.wait(60000, function()
         local bufs = vim.api.nvim_list_bufs()
         for _, b in ipairs(bufs) do
@@ -513,4 +531,4 @@ function M.run_chatgpt_paste_command()
   end
 end
 
-return M
+return M
\ No newline at end of file
diff --git a/lua/chatgpt_nvim/ui.lua b/lua/chatgpt_nvim/ui.lua
index dbde6ed..c4f645e 100644
--- a/lua/chatgpt_nvim/ui.lua
+++ b/lua/chatgpt_nvim/ui.lua
@@ -4,6 +4,14 @@ local conf = config.load()
 local debug_bufnr = nil
 
 if conf.improved_debug then
+  -- Check if a debug buffer is already open. Close it first to avoid duplicates.
+  for _, buf in ipairs(vim.api.nvim_list_bufs()) do
+    local name = vim.api.nvim_buf_get_name(buf)
+    if name:match("ChatGPT_Debug_Log$") then
+      vim.api.nvim_buf_delete(buf, {force = true})
+    end
+  end
+
   debug_bufnr = vim.api.nvim_create_buf(false, true)
   vim.api.nvim_buf_set_name(debug_bufnr, "ChatGPT_Debug_Log")
   vim.api.nvim_buf_set_option(debug_bufnr, "filetype", "log")
@@ -21,11 +29,19 @@ end
 function M.pick_directories(dirs)
   local selected_dirs = {}
 
-  local lines = { "Delete lines for directories you do NOT want, then :wq" }
+  local lines = { "Delete lines for directories you do NOT want, then save & close (e.g. :wq, :x, or :bd)" }
   for _, d in ipairs(dirs) do
     table.insert(lines, d)
   end
 
+  -- If a file selection buffer is already open, close it to avoid confusion
+  for _, buf in ipairs(vim.api.nvim_list_bufs()) do
+    local name = vim.api.nvim_buf_get_name(buf)
+    if name:match("ChatGPT_File_Selection") then
+      vim.api.nvim_buf_delete(buf, {force = true})
+    end
+  end
+
   local bufnr = vim.api.nvim_create_buf(false, false)
   vim.api.nvim_buf_set_name(bufnr, "ChatGPT_File_Selection")
   vim.api.nvim_buf_set_option(bufnr, "filetype", "markdown")
@@ -47,7 +63,11 @@ function M.pick_directories(dirs)
   vim.api.nvim_create_autocmd("BufWriteCmd", {
     buffer = bufnr,
     once = true,
-    callback = on_write
+    callback = function()
+      on_write()
+      -- Automatically close the buffer once saved
+      vim.cmd("bd! " .. bufnr)
+    end
   })
 
   vim.cmd("split")
@@ -68,8 +88,6 @@ function M.pick_directories(dirs)
   return selected_dirs
 end
 
--- A function to chunk a long string if it exceeds token_limit
--- We'll just do rough splits by lines or paragraphs.
 function M.chunkify(text, estimate_tokens_fn, token_limit)
   local lines = vim.split(text, "\n")
   local chunks = {}
@@ -80,9 +98,7 @@ function M.chunkify(text, estimate_tokens_fn, token_limit)
     local test_text = (current_text == "") and line or (current_text .. "\n" .. line)
     local est_tokens = estimate_tokens_fn(test_text)
     if est_tokens > token_limit then
-      -- push current chunk
       table.insert(chunks, current_text)
-      -- start a new chunk
       current_text = line
     else
       current_text = test_text
@@ -96,4 +112,4 @@ function M.chunkify(text, estimate_tokens_fn, token_limit)
   return chunks
 end
 
-return M
+return M
\ No newline at end of file