Commit 69b32f2

feat: allow disabling context tip
In some cases the context tip can lead LLMs to attempt tool usage. When this option is disabled, the system prompt will no longer include context tips.
1 parent 55f2162 commit 69b32f2

File tree

README.md
lua/CopilotChat/client.lua
lua/CopilotChat/config.lua
lua/CopilotChat/init.lua

4 files changed: 10 additions & 3 deletions

README.md

Lines changed: 1 addition & 0 deletions
@@ -491,6 +491,7 @@ Below are all available configuration options with their default values:
   auto_insert_mode = false, -- Automatically enter insert mode when opening window and on new prompt
   insert_at_end = false, -- Move cursor to end of buffer when inserting text
   clear_chat_on_new_prompt = false, -- Clears chat on every new prompt
+  disable_context_tip = false, -- Disable context tip/help in the system prompts (this lists tools available in the current context and hints the LLM to use them)
 
   -- Static config starts here (can be configured only via setup function)

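To opt out, set the new flag in the plugin configuration. A minimal sketch (every other option is left at its default):

require('CopilotChat').setup({
  -- Skip the context tip/help section of the system prompt
  disable_context_tip = true,
})
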
lua/CopilotChat/client.lua

Lines changed: 6 additions & 3 deletions
@@ -8,6 +8,7 @@
 ---@field agent string?
 ---@field temperature number
 ---@field on_progress? fun(response: string):nil
+---@field disable_context_tip boolean?
 
 ---@class CopilotChat.Client.model : CopilotChat.Provider.model
 ---@field provider string
@@ -199,13 +200,15 @@ end
 --- @param prompt string
 --- @param system_prompt string
 --- @param generated_messages table<CopilotChat.Provider.input>
-local function generate_ask_request(history, contexts, prompt, system_prompt, generated_messages)
+--- @param opts table?
+local function generate_ask_request(history, contexts, prompt, system_prompt, generated_messages, opts)
   local messages = {}
+  opts = opts or {}
 
   system_prompt = vim.trim(system_prompt)
 
   -- Include context help
-  if contexts and not vim.tbl_isempty(contexts) then
+  if contexts and not vim.tbl_isempty(contexts) and not opts.disable_context_tip then
     local help_text = [[When you need additional context, request it using this format:
 
 > #<command>:`<input>`
@@ -657,7 +660,7 @@ function Client:ask(prompt, opts)
 
   local headers = self:authenticate(provider_name)
   local request = provider.prepare_input(
-    generate_ask_request(history, opts.contexts, prompt, opts.system_prompt, generated_messages),
+    generate_ask_request(history, opts.contexts, prompt, opts.system_prompt, generated_messages, opts),
     options
   )
   local is_stream = request.stream

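For illustration, here is a standalone sketch of the gating this hunk introduces, using a hypothetical helper name (build_context_tip) rather than the plugin's actual structure: the context help text is only produced when contexts exist and the new flag is off.

-- Hypothetical, simplified version of the check added to generate_ask_request.
-- `contexts` is the table of available context providers; `opts.disable_context_tip`
-- is the flag threaded through from the user configuration.
local function build_context_tip(contexts, opts)
  opts = opts or {}
  if not contexts or vim.tbl_isempty(contexts) or opts.disable_context_tip then
    return nil -- nothing to list, or the user opted out of the tip
  end
  return table.concat({
    'When you need additional context, request it using this format:',
    '',
    '> #<command>:`<input>`',
  }, '\n')
end
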
lua/CopilotChat/config.lua

Lines changed: 2 additions & 0 deletions
@@ -37,6 +37,7 @@ local select = require('CopilotChat.select')
 ---@field auto_insert_mode boolean?
 ---@field insert_at_end boolean?
 ---@field clear_chat_on_new_prompt boolean?
+---@field disable_context_tip boolean?
 
 --- CopilotChat default configuration
 ---@class CopilotChat.config : CopilotChat.config.shared
@@ -101,6 +102,7 @@ return {
   auto_insert_mode = false, -- Automatically enter insert mode when opening window and on new prompt
   insert_at_end = false, -- Move cursor to end of buffer when inserting text
   clear_chat_on_new_prompt = false, -- Clears chat on every new prompt
+  disable_context_tip = false, -- Disable context tip/help in the prompts
 
   -- Static config starts here (can be configured only via setup function)

lua/CopilotChat/init.lua

Lines changed: 1 addition & 0 deletions
@@ -895,6 +895,7 @@ function M.ask(prompt, config)
     model = selected_model,
     agent = selected_agent,
     temperature = config.temperature,
+    disable_context_tip = config.disable_context_tip,
     on_progress = vim.schedule_wrap(function(token)
       local out = config.stream and config.stream(token, state.source) or nil
       if out == nil then

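Since M.ask reads the flag from the config table it receives, the tip should also be skippable for a single prompt. A sketch, assuming the per-call config is merged with the global setup as it is for other options:

-- Hypothetical one-off override; everything else falls back to the setup() defaults.
require('CopilotChat').ask('Explain the selected code', {
  disable_context_tip = true,
})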