From c2e326ca1342b5e272ebfa577e9a57a1e9f8b9f1 Mon Sep 17 00:00:00 2001
From: Tomas Slusny
Date: Wed, 5 Feb 2025 02:48:08 +0100
Subject: [PATCH] Add support for default sticky prompts

See #716

Signed-off-by: Tomas Slusny
---
 README.md                  | 15 ++++++++++++++-
 lua/CopilotChat/config.lua |  5 ++++-
 lua/CopilotChat/init.lua   | 17 +++++++++++++++++
 3 files changed, 35 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index ce9c7c2f..33af4062 100644
--- a/README.md
+++ b/README.md
@@ -224,6 +224,17 @@ List all files in the workspace
 What is 1 + 11
 ```
 
+You can also set default sticky prompts in the configuration:
+
+```lua
+{
+  sticky = {
+    '@models Using Mistral-small',
+    '#files:full',
+  }
+}
+```
+
 ## Models
 
 You can list available models with `:CopilotChatModels` command. Model determines the AI model used for the chat.
@@ -417,11 +428,13 @@ Also see [here](/lua/CopilotChat/config.lua):
 
   -- Shared config starts here (can be passed to functions at runtime and configured via setup function)
 
   system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use (can be specified manually in prompt via /).
+
   model = 'gpt-4o', -- Default model to use, see ':CopilotChatModels' for available models (can be specified manually in prompt via $).
   agent = 'copilot', -- Default agent to use, see ':CopilotChatAgents' for available agents (can be specified manually in prompt via @).
   context = nil, -- Default context or array of contexts to use (can be specified manually in prompt via #).
-  temperature = 0.1, -- GPT result temperature
+  sticky = nil, -- Default sticky prompt or array of sticky prompts to use at start of every new chat.
+  temperature = 0.1, -- GPT result temperature
 
   headless = false, -- Do not write to chat buffer and use history(useful for using callback for custom processing)
   callback = nil, -- Callback to use when ask response is received
diff --git a/lua/CopilotChat/config.lua b/lua/CopilotChat/config.lua
index 44182371..47b711ab 100644
--- a/lua/CopilotChat/config.lua
+++ b/lua/CopilotChat/config.lua
@@ -56,6 +56,7 @@ local utils = require('CopilotChat.utils')
 ---@field model string?
 ---@field agent string?
 ---@field context string|table|nil
+---@field sticky string|table|nil
 ---@field temperature number?
 ---@field headless boolean?
 ---@field callback fun(response: string, source: CopilotChat.source)?
@@ -90,11 +91,13 @@ return {
 
   -- Shared config starts here (can be passed to functions at runtime and configured via setup function)
 
   system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use (can be specified manually in prompt via /).
+
   model = 'gpt-4o', -- Default model to use, see ':CopilotChatModels' for available models (can be specified manually in prompt via $).
   agent = 'copilot', -- Default agent to use, see ':CopilotChatAgents' for available agents (can be specified manually in prompt via @).
   context = nil, -- Default context or array of contexts to use (can be specified manually in prompt via #).
-  temperature = 0.1, -- GPT result temperature
+  sticky = nil, -- Default sticky prompt or array of sticky prompts to use at start of every new chat.
+  temperature = 0.1, -- GPT result temperature
 
   headless = false, -- Do not write to chat buffer and use history(useful for using callback for custom processing)
   callback = nil, -- Callback to use when ask response is received
diff --git a/lua/CopilotChat/init.lua b/lua/CopilotChat/init.lua
index ca608fa3..2b93c8cf 100644
--- a/lua/CopilotChat/init.lua
+++ b/lua/CopilotChat/init.lua
@@ -315,6 +315,23 @@ local function finish(start_of_chat)
 
   state.chat:append(M.config.question_header .. M.config.separator .. '\n\n')
 
+  -- Add default sticky prompts after reset
+  if start_of_chat then
+    if M.config.sticky then
+      local last_prompt = state.last_prompt or ''
+
+      if type(M.config.sticky) == 'table' then
+        for _, sticky in ipairs(M.config.sticky) do
+          last_prompt = last_prompt .. '\n> ' .. sticky
+        end
+      else
+        last_prompt = last_prompt .. '\n> ' .. M.config.sticky
+      end
+
+      state.last_prompt = last_prompt
+    end
+  end
+
   -- Reinsert sticky prompts from last prompt
   if state.last_prompt then
     local has_sticky = false
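
For reference, a minimal sketch of how the new `sticky` option would be exercised from a user's config once this patch lands, assuming the plugin's existing `require('CopilotChat').setup()` entry point (the prompt values are the illustrative ones from the README hunk above):

```lua
-- Usage sketch for the new `sticky` option (assumes the standard setup()
-- entry point; the values below are illustrative, taken from the README).
require('CopilotChat').setup({
  -- Per the finish() change in init.lua, each entry is reinserted as a
  -- '> '-prefixed line at the start of every new chat.
  sticky = {
    '@models Using Mistral-small',
    '#files:full',
  },
})
```

A single string should also work: when `M.config.sticky` is not a table, the `else` branch in `finish()` appends it directly as one `> ` line.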