From e97aa81d8f6c57359968af0ad990e1ee4eae015f Mon Sep 17 00:00:00 2001
From: Dominik Polakovics
Date: Sat, 14 Dec 2024 12:48:16 +0100
Subject: [PATCH] feat: add debug logging

---
 .chatgpt_config.yaml         |  1 +
 lua/chatgpt_nvim/config.lua  | 19 +++++-----
 lua/chatgpt_nvim/context.lua | 38 ++++++++++++++++++-
 lua/chatgpt_nvim/handler.lua | 27 +++++++++++++-
 lua/chatgpt_nvim/init.lua    | 71 ++++++++++++++++--------------------
 5 files changed, 104 insertions(+), 52 deletions(-)

diff --git a/.chatgpt_config.yaml b/.chatgpt_config.yaml
index 3d55587..247ecb1 100644
--- a/.chatgpt_config.yaml
+++ b/.chatgpt_config.yaml
@@ -3,3 +3,4 @@ default_prompt_blocks:
   - "basic-prompt"
 directories:
   - "."
+debug: false
diff --git a/lua/chatgpt_nvim/config.lua b/lua/chatgpt_nvim/config.lua
index 2234695..e3e370c 100644
--- a/lua/chatgpt_nvim/config.lua
+++ b/lua/chatgpt_nvim/config.lua
@@ -1,11 +1,3 @@
--- lua/chatgpt_nvim/config.lua
--- Modified to:
--- 1) Determine the Git root based on the currently opened file.
--- 2) If no file is open or not in Git repo, fallback to current working directory.
--- 3) Add support for configuring a list of default prompt blocks ("go-development", "typo3-development", "basic-prompt") that can override the initial prompt if provided.
--- 4) Add support for configuring a token limit.
--- 5) Load a project_name from the .chatgpt_config.yaml for project verification.
-
 local M = {}
 
 local uv = vim.loop
@@ -61,7 +53,8 @@
     directories = { "." },
     default_prompt_blocks = {},
     token_limit = 128000,
-    project_name = ""
+    project_name = "",
+    debug = false
   }
 
   if fd then
@@ -86,6 +79,9 @@
         if type(result.project_name) == "string" then
           config.project_name = result.project_name
         end
+        if type(result.debug) == "boolean" then
+          config.debug = result.debug
+        end
       end
     end
   else
@@ -106,6 +102,11 @@
     end
   end
 
+  if config.debug then
+    vim.api.nvim_out_write("[chatgpt_nvim:config] Loaded config from: " .. path .. "\n")
+    vim.api.nvim_out_write("[chatgpt_nvim:config] Debug logging is enabled.\n")
+  end
+
   return config
 end
diff --git a/lua/chatgpt_nvim/context.lua b/lua/chatgpt_nvim/context.lua
index 7e750eb..9983227 100644
--- a/lua/chatgpt_nvim/context.lua
+++ b/lua/chatgpt_nvim/context.lua
@@ -1,11 +1,16 @@
 local M = {}
 
 local uv = vim.loop
+local config = require('chatgpt_nvim.config')
 
 local function load_gitignore_patterns(root)
+  local conf = config.load()
   local gitignore_path = root .. "/.gitignore"
   local fd = uv.fs_open(gitignore_path, "r", 438)
   if not fd then
+    if conf.debug then
+      vim.api.nvim_out_write("[chatgpt_nvim:context] No .gitignore found.\n")
+    end
     return {}
   end
   local stat = uv.fs_fstat(fd)
@@ -19,6 +24,9 @@
       patterns[#patterns+1] = line
     end
   end
+  if conf.debug then
+    vim.api.nvim_out_write("[chatgpt_nvim:context] Loaded " .. #patterns .. " gitignore patterns.\n")
+  end
   return patterns
 end
 
@@ -32,23 +40,35 @@ local function should_ignore_file(file, ignore_patterns)
 end
 
 local function is_text_file(file)
+  local conf = config.load()
   local fd = uv.fs_open(file, "r", 438)
   if not fd then
+    if conf.debug then
+      vim.api.nvim_out_write("[chatgpt_nvim:context] Could not open file: " .. file .. " for reading.\n")
+    end
     return false
   end
   local chunk = uv.fs_read(fd, 1024, 0) or ""
   uv.fs_close(fd)
   -- Check for null bytes as a heuristic for binary files
   if chunk:find("\0") then
+    if conf.debug then
+      vim.api.nvim_out_write("[chatgpt_nvim:context] File appears binary: " .. file .. "\n")
+    end
     return false
   end
   return true
 end
 
 local function scandir(dir, ignore_patterns, files)
-  files = files or {}
+  local conf = config.load()
   local fd = uv.fs_opendir(dir, nil, 50)
-  if not fd then return files end
+  if not fd then
+    if conf.debug then
+      vim.api.nvim_out_write("[chatgpt_nvim:context] Could not open dir: " .. dir .. "\n")
+    end
+    return files
+  end
   while true do
     local ents = uv.fs_readdir(fd)
     if not ents then break end
@@ -60,6 +80,10 @@
         elseif ent.type == "directory" and ent.name ~= ".git" then
           scandir(fullpath, ignore_patterns, files)
         end
+      else
+        if conf.debug then
+          vim.api.nvim_out_write("[chatgpt_nvim:context] Ignoring file/dir: " .. fullpath .. "\n")
+        end
       end
     end
   end
@@ -68,6 +92,7 @@
 end
 
 function M.get_project_files(directories)
+  local conf = config.load()
   local root = vim.fn.getcwd()
   local ignore_patterns = load_gitignore_patterns(root)
   local all_files = {}
@@ -85,6 +110,10 @@
     table.insert(rel_files, rel)
   end
 
+  if conf.debug then
+    vim.api.nvim_out_write("[chatgpt_nvim:context] Found " .. #rel_files .. " project files.\n")
+  end
+
   return rel_files
 end
@@ -95,6 +124,7 @@ function M.get_project_structure(directories)
 end
 
 function M.get_file_contents(files)
+  local conf = config.load()
   local root = vim.fn.getcwd()
   local sections = {}
   for _, f in ipairs(files) do
@@ -111,6 +141,10 @@
       else
         uv.fs_close(fd)
       end
+    else
+      if conf.debug then
+        vim.api.nvim_out_write("[chatgpt_nvim:context] Could not open file for content: " .. f .. "\n")
+      end
     end
   end
   return table.concat(sections, "\n")
diff --git a/lua/chatgpt_nvim/handler.lua b/lua/chatgpt_nvim/handler.lua
index 90c523b..b5ebc26 100644
--- a/lua/chatgpt_nvim/handler.lua
+++ b/lua/chatgpt_nvim/handler.lua
@@ -1,6 +1,8 @@
 local M = {}
 local uv = vim.loop
 
+local config = require('chatgpt_nvim.config')
+
 local function ensure_dir(path)
   local st = uv.fs_stat(path)
   if st and st.type == 'directory' then
@@ -15,10 +17,16 @@
 function M.get_clipboard_content()
-  return vim.fn.getreg('+')
+  local conf = config.load()
+  local content = vim.fn.getreg('+')
+  if conf.debug then
+    vim.api.nvim_out_write("[chatgpt_nvim:handler] Clipboard content length: " .. #content .. "\n")
+  end
+  return content
 end
 
 function M.write_file(filepath, content)
+  local conf = config.load()
   local dir = filepath:match("(.*)/")
   if dir and dir ~= "" then
     ensure_dir(dir)
   end
@@ -26,21 +34,38 @@
   local fd = uv.fs_open(filepath, "w", 438)
   if not fd then
     vim.api.nvim_err_writeln("Could not open file for writing: " .. filepath)
+    if conf.debug then
+      vim.api.nvim_out_write("[chatgpt_nvim:handler] Failed to open file for writing: " .. filepath .. "\n")
+    end
     return
   end
   uv.fs_write(fd, content, -1)
   uv.fs_close(fd)
+  if conf.debug then
+    vim.api.nvim_out_write("[chatgpt_nvim:handler] Successfully wrote file: " .. filepath .. "\n")
+  end
 end
 
 function M.delete_file(filepath)
+  local conf = config.load()
   local st = uv.fs_stat(filepath)
   if st then
     local success, err = uv.fs_unlink(filepath)
    if not success then
       vim.api.nvim_err_writeln("Could not delete file: " .. filepath .. " - " .. (err or "unknown error"))
+      if conf.debug then
+        vim.api.nvim_out_write("[chatgpt_nvim:handler] Failed to delete file: " .. filepath .. "\n")
+      end
+    else
+      if conf.debug then
+        vim.api.nvim_out_write("[chatgpt_nvim:handler] Deleted file: " .. filepath .. "\n")
+      end
     end
   else
     vim.api.nvim_err_writeln("File not found, cannot delete: " .. filepath)
+    if conf.debug then
+      vim.api.nvim_out_write("[chatgpt_nvim:handler] File not found for deletion: " .. filepath .. "\n")
+    end
   end
 end
diff --git a/lua/chatgpt_nvim/init.lua b/lua/chatgpt_nvim/init.lua
index 29e9e24..32e6238 100644
--- a/lua/chatgpt_nvim/init.lua
+++ b/lua/chatgpt_nvim/init.lua
@@ -1,4 +1,3 @@
--- lua/chatgpt_nvim/init.lua
 local M = {}
 
 local context = require('chatgpt_nvim.context')
@@ -12,6 +11,7 @@ local function copy_to_clipboard(text)
 end
 
 local function parse_response(raw)
+  local conf = config.load()
   if not ok_yaml then
     vim.api.nvim_err_writeln("lyaml not available. Install with `luarocks install lyaml`.")
     return nil
@@ -19,8 +19,14 @@
   local ok, data = pcall(lyaml.load, raw)
   if not ok or not data then
     vim.api.nvim_err_writeln("Failed to parse YAML response.")
+    if conf.debug then
+      vim.api.nvim_out_write("[chatgpt_nvim:init] RAW response that failed parsing:\n" .. raw .. "\n")
+    end
     return nil
   end
+  if conf.debug then
+    vim.api.nvim_out_write("[chatgpt_nvim:init] Successfully parsed YAML response.\n")
+  end
   return data
 end
@@ -37,32 +43,14 @@ local function is_subpath(root, path)
   return target_abs:sub(1, #root_abs) == root_abs
 end
 
--- The improved workflow:
---
--- :ChatGPT:
--- 1) Create a prompt that includes:
---    - The project's name and structure, but NOT file contents.
---    - The initial instructions including that the O1 Model should respond with a YAML listing which files are needed.
--- 2) Copy that prompt to clipboard.
---
--- The O1 Model responds with a YAML listing files it needs.
---
--- :ChatGPTPaste:
--- 1) Parse the YAML from clipboard.
--- 2) If the YAML lists files needed (no final changes), generate a new prompt that includes the requested files content plus a reminder that O1 Model can ask for more files.
--- 3) Copy that prompt to clipboard for O1 Model.
---
--- The O1 Model can continue to ask for more files. Each time we run :ChatGPTPaste with the new YAML, we provide more files. Eventually, the O1 Model returns the final YAML with `project_name` and `files` changes.
---
--- When final YAML with `files` and `project_name` matches current project, we apply the changes.
---
--- If more context is needed, we ask user outside the YAML.
-
 -- We'll store requested files in a global variable for now, though a more robust solution might be needed.
 local requested_files = {}
 
 function M.run_chatgpt_command()
   local conf = config.load()
+  if conf.debug then
+    vim.api.nvim_out_write("[chatgpt_nvim:init] Running :ChatGPT command.\n")
+  end
   local user_input = vim.fn.input("Message for O1 Model: ")
   if user_input == "" then
     print("No input provided.")
@@ -72,15 +60,6 @@
   local dirs = conf.directories or {"."}
   local project_structure = context.get_project_structure(dirs)
 
-  -- Initial prompt without file contents, asking the O1 model which files are needed.
-  -- We'll instruct the O1 Model to respond with a YAML that includes the files it needs.
-  -- Format of the O1 model response should be something like:
-  -- project_name:
-  -- files:
-  --   - path: "path/to/needed_file"
-  --
-  -- No changes yet, just asking for which files the model wants.
-
   local initial_sections = {
     conf.initial_prompt .. "\n" .. user_input,
     "\n\nProject name: " .. (conf.project_name or "") .. "\n",
@@ -95,6 +74,10 @@
   local token_limit = conf.token_limit or 8000
   local token_count = estimate_tokens(prompt)
 
+  if conf.debug then
+    vim.api.nvim_out_write("[chatgpt_nvim:init] Prompt token count: " .. token_count .. "\n")
+  end
+
   if token_count > token_limit then
     print("Too many files in project structure. The request exceeds the O1 model limit of " .. token_limit .. " tokens.")
     return
@@ -105,8 +88,11 @@
 end
 
 function M.run_chatgpt_paste_command()
-  print("Reading ChatGPT YAML response from clipboard...")
   local conf = config.load()
+  if conf.debug then
+    vim.api.nvim_out_write("[chatgpt_nvim:init] Running :ChatGPTPaste command.\n")
+  end
+  print("Reading ChatGPT YAML response from clipboard...")
   local raw = handler.get_clipboard_content()
   if raw == "" then
     vim.api.nvim_err_writeln("Clipboard is empty. Please copy the YAML response from ChatGPT first.")
@@ -118,12 +104,10 @@
     return
   end
 
-  -- Check if this is the final answer (with modifications) or just requesting more files.
   if data.project_name and data.files then
-    -- The O1 model provided a YAML with files. We must check if these files contain content to apply changes.
-    -- If 'delete: true' or 'content' is found, that means it's the final set of changes.
-    -- If only paths are listed without 'content' or 'delete', that means the model is asking for files.
-
+    if conf.debug then
+      vim.api.nvim_out_write("[chatgpt_nvim:init] Received project_name and files from response.\n")
+    end
     local is_final = false
     for _, fileinfo in ipairs(data.files) do
       if fileinfo.content or fileinfo.delete == true then
@@ -133,7 +117,6 @@
     end
 
     if is_final then
-      -- Final changes: apply them
       if data.project_name ~= conf.project_name then
         vim.api.nvim_err_writeln("Project name mismatch. The provided changes are for project '" ..
           (data.project_name or "unknown") .. "' but current project is '" ..
@@ -149,16 +132,21 @@
          goto continue
        end
 
-        -- Ensure the path is within the project root
        if not is_subpath(root, fileinfo.path) then
          vim.api.nvim_err_writeln("Invalid file path outside project root: " .. fileinfo.path)
          goto continue
        end
 
        if fileinfo.delete == true then
+          if conf.debug then
+            vim.api.nvim_out_write("[chatgpt_nvim:init] Deleting file: " .. fileinfo.path .. "\n")
+          end
          handler.delete_file(fileinfo.path)
          print("Deleted file: " .. fileinfo.path)
        elseif fileinfo.content then
+          if conf.debug then
+            vim.api.nvim_out_write("[chatgpt_nvim:init] Writing file: " .. fileinfo.path .. "\n")
+          end
          handler.write_file(fileinfo.path, fileinfo.content)
          print("Wrote file: " .. fileinfo.path)
        else
@@ -201,7 +189,6 @@
       end
     end
 
-    -- Create a new prompt including the requested files plus a reminder that the model can ask for more.
     local sections = {
       conf.initial_prompt,
       "\n\nProject name: " .. (conf.project_name or ""),
@@ -215,6 +202,10 @@
     local token_limit = conf.token_limit or 8000
     local token_count = estimate_tokens(prompt)
 
+    if conf.debug then
+      vim.api.nvim_out_write("[chatgpt_nvim:init] Returning requested files. Token count: " .. token_count .. "\n")
+    end
+
     if token_count > token_limit then
       vim.api.nvim_err_writeln("Too many requested files. Exceeds token limit.")
       return