From 00e48586ad3f8a8ee0341ba352b84f014e2bf41d Mon Sep 17 00:00:00 2001
From: Dominik Polakovics
Date: Thu, 2 Jan 2025 13:44:42 +0100
Subject: [PATCH] feat: add the :ChatGPTCurrentBuffer command

---
 lua/chatgpt_nvim/init.lua | 84 ++++++++++++++++++++++++++++++++++-----
 plugin/chatgpt.vim        |  1 +
 2 files changed, 76 insertions(+), 9 deletions(-)

diff --git a/lua/chatgpt_nvim/init.lua b/lua/chatgpt_nvim/init.lua
index 31ae58e..8d1f1f9 100644
--- a/lua/chatgpt_nvim/init.lua
+++ b/lua/chatgpt_nvim/init.lua
@@ -83,7 +83,7 @@ end
 local function handle_step_by_step_if_needed(prompt, estimate_fn)
   local conf = config.load()
   local token_count = estimate_fn(prompt)
-  -- If step-by-step is disabled or token count is within limit, return original prompt
+  -- If step-by-step is disabled or token count is within limit, return the original prompt
   if not conf.enable_step_by_step or token_count <= (conf.token_limit or 8000) then
     return { prompt }
   end
@@ -91,10 +91,11 @@ local function handle_step_by_step_if_needed(prompt, estimate_fn)
   -- If we exceed the token limit, create a single message prompting the user to split tasks
   local step_prompt = [[
     It appears this request might exceed the model's token limit if done all at once.
-    Please break down the tasks into smaller steps and handle them one by one. 
+    Please break down the tasks into smaller steps and handle them one by one.
     At each step, we'll provide relevant files or context if needed.
     Thank you!
   ]]
+
   return { step_prompt }
 end
 
@@ -220,7 +221,7 @@ local function partial_accept(changes)
     once = true,
     callback = function()
       on_write()
-      vim.cmd("bd! " .. bufnr) -- auto-close buffer
+      vim.cmd("bd! " .. bufnr)
     end
   })
 
@@ -348,13 +349,10 @@ function M.run_chatgpt_command()
   local estimate_fn = get_estimate_fn()
   local chunks = handle_step_by_step_if_needed(prompt, estimate_fn)
 
-  -- We either get the original prompt or a single special prompt
   copy_to_clipboard(chunks[1])
   if #chunks == 1 then
     vim.api.nvim_out_write("Prompt copied to clipboard! Paste into ChatGPT.\n")
   else
-    -- In this revised approach, we no longer generate multiple buffers.
-    -- We simply rely on the single step-by-step prompt in chunks[1].
     vim.api.nvim_out_write("Step-by-step prompt copied to clipboard!\n")
   end
 
@@ -416,7 +414,6 @@ function M.run_chatgpt_paste_command()
       vim.api.nvim_err_writeln("Preview not closed in time. Aborting.")
       return
     end
-  end
 
   local final_files = data.files
   if conf.partial_acceptance then
@@ -488,10 +485,9 @@ function M.run_chatgpt_paste_command()
     local token_count = estimate_fn(prompt)
     ui.debug_log("Returning requested files. Token count: " .. token_count)
 
-    -- We simply check if step-by-step is enabled, then provide a short note if needed
     if token_count > (conf.token_limit or 8000) and conf.enable_step_by_step then
       local step_message = [[
-        It appears this requested data is quite large. Please lets split the task into smaller steps
+        It appears this requested data is quite large. Please split the task into smaller steps
         and continue step by step. Which files would you need for the first step?
       ]]
 
@@ -511,4 +507,74 @@ function M.run_chatgpt_paste_command()
   end
 end
 
+----------------------------------------------------------------------------
+-- run_chatgpt_current_buffer_command
+----------------------------------------------------------------------------
+function M.run_chatgpt_current_buffer_command()
+  local conf = config.load()
+  ui.debug_log("Running :ChatGPTCurrentBuffer command.")
+
+  -- Get the content of the current buffer as user instructions
+  local lines = vim.api.nvim_buf_get_lines(0, 0, -1, false)
+  local user_input = table.concat(lines, "\n")
+
+  -- Possibly let user select directories if interactive_file_selection is true
+  local dirs = conf.directories or {"."}
+  if conf.interactive_file_selection then
+    dirs = ui.pick_directories(dirs)
+    if #dirs == 0 then
+      dirs = conf.directories or {"."}
+    end
+  end
+
+  local project_structure = context.get_project_structure(dirs)
+  local initial_files = conf.initial_files or {}
+  local included_sections = {}
+
+  for _, item in ipairs(initial_files) do
+    local root = vim.fn.getcwd()
+    local full_path = root .. "/" .. item
+    if is_directory(full_path) then
+      local dir_files = context.get_project_files({item})
+      for _, f in ipairs(dir_files) do
+        local path = root .. "/" .. f
+        local data = read_file(path)
+        if data then
+          table.insert(included_sections, "\nFile: `" .. f .. "`\n```\n" .. data .. "\n```\n")
+        end
+      end
+    else
+      local data = read_file(full_path)
+      if data then
+        table.insert(included_sections, "\nFile: `" .. item .. "`\n```\n" .. data .. "\n```\n")
+      end
+    end
+  end
+
+  local initial_sections = {
+    "### Basic Prompt Instructions:\n",
+    conf.initial_prompt .. "\n\n\n",
+    "### User Instructions:\n",
+    user_input .. "\n\n\n",
+    "### Context/Data:\n",
+    "Project name: " .. (conf.project_name or "") .. "\n",
+    "Project Structure:\n",
+    project_structure,
+    table.concat(included_sections, "\n")
+  }
+
+  local prompt = table.concat(initial_sections, "\n")
+  store_prompt_for_reference(prompt)
+
+  local estimate_fn = get_estimate_fn()
+  local chunks = handle_step_by_step_if_needed(prompt, estimate_fn)
+
+  copy_to_clipboard(chunks[1])
+  if #chunks == 1 then
+    vim.api.nvim_out_write("Prompt (from current buffer) copied to clipboard! Paste into ChatGPT.\n")
+  else
+    vim.api.nvim_out_write("Step-by-step prompt (from current buffer) copied to clipboard!\n")
+  end
+end
+
 return M
diff --git a/plugin/chatgpt.vim b/plugin/chatgpt.vim
index 13a5588..7962a18 100644
--- a/plugin/chatgpt.vim
+++ b/plugin/chatgpt.vim
@@ -1,2 +1,3 @@
 command! ChatGPT lua require('chatgpt_nvim').run_chatgpt_command()
 command! ChatGPTPaste lua require('chatgpt_nvim').run_chatgpt_paste_command()
+command! ChatGPTCurrentBuffer lua require('chatgpt_nvim').run_chatgpt_current_buffer_command()