author     Grail Finder <wohilas@gmail.com>  2026-02-17 13:42:49 +0300
committer  Grail Finder <wohilas@gmail.com>  2026-02-17 13:42:49 +0300
commit     b67ae1be98f3131fa8cec9ae01d8f86fd4df8e06 (patch)
tree       2a00b4bf75cb59e6643e14e50c7f51c278411129
parent     372e49199b58281bf1c7f75dfa2835189bc61383 (diff)
Enha: strip thinking blocks from messages sent to the LLM API (kept in chat history); remove the trailing {role}: persona suffix
-rw-r--r--  config.example.toml   1
-rw-r--r--  config/config.go      1
-rw-r--r--  helpfuncs.go         19
-rw-r--r--  llm.go               62
4 files changed, 38 insertions, 45 deletions
diff --git a/config.example.toml b/config.example.toml
index 1b466eb..c2ec684 100644
--- a/config.example.toml
+++ b/config.example.toml
@@ -48,3 +48,4 @@ EnableMouse = false # Enable mouse support in the UI
CharSpecificContextEnabled = true
CharSpecificContextTag = "@"
AutoTurn = true
+StripThinkingFromAPI = true # Strip <think> blocks from messages before sending to LLM (keeps them in chat history)
diff --git a/config/config.go b/config/config.go
index d5c059f..e8c7bd4 100644
--- a/config/config.go
+++ b/config/config.go
@@ -19,6 +19,7 @@ type Config struct {
ToolRole string `toml:"ToolRole"`
ToolUse bool `toml:"ToolUse"`
ThinkUse bool `toml:"ThinkUse"`
+ StripThinkingFromAPI bool `toml:"StripThinkingFromAPI"`
AssistantRole string `toml:"AssistantRole"`
SysDir string `toml:"SysDir"`
ChunkLimit uint32 `toml:"ChunkLimit"`
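
The new StripThinkingFromAPI flag is a plain boolean picked up from the TOML config next to the existing ThinkUse option. A minimal loader sketch, assuming a BurntSushi/toml-style decoder and a Config struct reduced to the two thinking-related fields (the repo's actual loader and full struct live elsewhere):

// Hypothetical sketch: decode config.example.toml and read the new flag.
package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml" // assumption: a BurntSushi/toml-style loader
)

type Config struct {
	ThinkUse             bool `toml:"ThinkUse"`
	StripThinkingFromAPI bool `toml:"StripThinkingFromAPI"`
}

func main() {
	var cfg Config
	if _, err := toml.DecodeFile("config.example.toml", &cfg); err != nil {
		log.Fatal(err)
	}
	// With the example config above, this prints "true".
	fmt.Println("strip thinking before API calls:", cfg.StripThinkingFromAPI)
}
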
diff --git a/helpfuncs.go b/helpfuncs.go
index b6b5faa..cc6bfe3 100644
--- a/helpfuncs.go
+++ b/helpfuncs.go
@@ -23,6 +23,25 @@ func isASCII(s string) bool {
return true
}
+// stripThinkingFromMsg removes thinking blocks from assistant messages.
+// Skips user, tool, and system messages as they may contain thinking examples.
+func stripThinkingFromMsg(msg models.RoleMsg) *models.RoleMsg {
+ if !cfg.StripThinkingFromAPI {
+ return &msg
+ }
+ // Skip user, tool, and system messages - they might contain thinking examples
+ if msg.Role == cfg.UserRole || msg.Role == cfg.ToolRole || msg.Role == "system" {
+ return &msg
+ }
+ // Strip thinking from assistant messages
+ if thinkRE.MatchString(msg.Content) {
+ msg.Content = thinkRE.ReplaceAllString(msg.Content, "")
+ // Trim leading/trailing whitespace left after removing the block
+ msg.Content = strings.TrimSpace(msg.Content)
+ }
+ return &msg
+}
+
// refreshChatDisplay updates the chat display based on current character view
// It filters messages for the character the user is currently "writing as"
// and updates the textView with the filtered conversation
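
In isolation the new helper reduces to a regexp replace plus a trim. A standalone sketch of that behaviour, assuming thinkRE matches <think>...</think> blocks; the real pattern, cfg, and models.RoleMsg are defined elsewhere in the repo:

// Self-contained sketch of the thinking-block stripping.
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Assumed shape of thinkRE: a non-greedy match over <think>...</think>.
var thinkRE = regexp.MustCompile(`(?s)<think>.*?</think>`)

func stripThinking(content string) string {
	if !thinkRE.MatchString(content) {
		return content
	}
	// Remove every thinking block, then trim the leftover whitespace.
	return strings.TrimSpace(thinkRE.ReplaceAllString(content, ""))
}

func main() {
	reply := "<think>outline the answer first</think>\n\nHere is the final reply."
	fmt.Printf("%q\n", stripThinking(reply)) // "Here is the final reply."
}
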
diff --git a/llm.go b/llm.go
index 1b5e3fb..fffa994 100644
--- a/llm.go
+++ b/llm.go
@@ -13,28 +13,6 @@ var imageAttachmentPath string // Global variable to track image attachment for
var lastImg string // for ctrl+j
var RAGMsg = "Retrieved context for user's query:\n"
-// addPersonaSuffixToLastUserMessage adds the persona suffix to the last user message
-// to indicate to the assistant who it should reply as
-func addPersonaSuffixToLastUserMessage(messages []models.RoleMsg, persona string) []models.RoleMsg {
- if len(messages) == 0 {
- return messages
- }
- // // Find the last user message to modify
- // for i := len(messages) - 1; i >= 0; i-- {
- // if messages[i].Role == cfg.UserRole || messages[i].Role == "user" {
- // // Create a copy of the message to avoid modifying the original
- // modifiedMsg := messages[i]
- // modifiedMsg.Content = modifiedMsg.Content + "\n" + persona + ":"
- // messages[i] = modifiedMsg
- // break
- // }
- // }
- modifiedMsg := messages[len(messages)-1]
- modifiedMsg.Content = modifiedMsg.Content + "\n" + persona + ":\n"
- messages[len(messages)-1] = modifiedMsg
- return messages
-}
-
// containsToolSysMsg checks if the toolSysMsg already exists in the chat body
func containsToolSysMsg() bool {
for _, msg := range chatBody.Messages {
@@ -187,7 +165,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
messages := make([]string, len(filteredMessages))
for i, m := range filteredMessages {
- messages[i] = m.ToPrompt()
+ messages[i] = stripThinkingFromMsg(m).ToPrompt()
}
prompt := strings.Join(messages, "\n")
// Add multimodal media markers to the prompt text when multimodal data is present
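
The completion-style backends (LCPCompletion here, DeepSeekerCompletion and OpenRouterCompletion below) all build one prompt string, so stripping is applied per message just before ToPrompt. A reduced sketch of that loop, with RoleMsg, ToPrompt, and the stripping helper replaced by stand-ins for the repo's models package and helpfuncs.go versions:

// Reduced sketch: strip each history message, then join into a single prompt.
package main

import (
	"fmt"
	"strings"
)

type RoleMsg struct {
	Role    string
	Content string
}

// ToPrompt is a stand-in; the real rendering lives in the models package.
func (m RoleMsg) ToPrompt() string {
	return m.Role + ":\n" + m.Content
}

func stripThinkingFromMsg(m RoleMsg) RoleMsg {
	// Placeholder for the regexp-based stripping shown in helpfuncs.go.
	m.Content = strings.TrimSpace(strings.ReplaceAll(m.Content, "<think>draft</think>", ""))
	return m
}

func main() {
	filtered := []RoleMsg{
		{Role: "user", Content: "hello"},
		{Role: "assistant", Content: "<think>draft</think>\nhi there"},
	}
	messages := make([]string, len(filtered))
	for i, m := range filtered {
		messages[i] = stripThinkingFromMsg(m).ToPrompt()
	}
	fmt.Println(strings.Join(messages, "\n"))
}
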
@@ -341,23 +319,21 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
logger.Debug("LCPChat: RAG message added to chat body", "role", ragMsg.Role,
"rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages))
}
- filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
+ filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// openai /v1/chat does not support custom roles; needs to be user, assistant, system
// Add persona suffix to the last user message to indicate who the assistant should reply as
- if cfg.AutoTurn && !resume {
- filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona)
- }
bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(filteredMessages)),
Model: chatBody.Model,
Stream: chatBody.Stream,
}
for i, msg := range filteredMessages {
- if msg.Role == cfg.UserRole {
- bodyCopy.Messages[i] = msg
+ strippedMsg := *stripThinkingFromMsg(msg)
+ if strippedMsg.Role == cfg.UserRole {
+ bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "user"
} else {
- bodyCopy.Messages[i] = msg
+ bodyCopy.Messages[i] = strippedMsg
}
}
// Clean null/empty messages to prevent API issues
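
The chat-style backends (LCPChat here, DeepSeekerChat and OpenRouterChat below) instead copy the filtered history into a fresh ChatBody, stripping thinking blocks and renaming the configured user persona to the literal "user" role that OpenAI-compatible /v1/chat endpoints accept. A condensed sketch of that copy step, with RoleMsg, ChatBody, and userRole as stand-ins for the repo's types and cfg.UserRole:

// Condensed sketch: copy, strip, and standardize roles for /v1/chat.
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

type RoleMsg struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type ChatBody struct {
	Messages []RoleMsg `json:"messages"`
	Model    string    `json:"model"`
	Stream   bool      `json:"stream"`
}

const userRole = "Adam" // stand-in for cfg.UserRole

func stripThinking(content string) string {
	// Placeholder for the thinkRE-based stripping from helpfuncs.go.
	return strings.TrimSpace(strings.ReplaceAll(content, "<think>plan</think>", ""))
}

func main() {
	filtered := []RoleMsg{
		{Role: "system", Content: "you are a helpful assistant"},
		{Role: userRole, Content: "hello"},
		{Role: "assistant", Content: "<think>plan</think>\nhi"},
	}
	bodyCopy := &ChatBody{Messages: make([]RoleMsg, len(filtered)), Model: "local", Stream: true}
	for i, msg := range filtered {
		msg.Content = stripThinking(msg.Content)
		if msg.Role == userRole {
			msg.Role = "user" // custom personas are not valid /v1/chat roles
		}
		bodyCopy.Messages[i] = msg
	}
	out, _ := json.MarshalIndent(bodyCopy, "", "  ")
	fmt.Println(string(out))
}

Since the persona suffix is no longer appended to the last user message, the second return value of filterMessagesForCurrentCharacter is unused in these chat backends, which is why it is discarded with the blank identifier in the hunks above.
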
@@ -437,7 +413,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
messages := make([]string, len(filteredMessages))
for i, m := range filteredMessages {
- messages[i] = m.ToPrompt()
+ messages[i] = stripThinkingFromMsg(m).ToPrompt()
}
prompt := strings.Join(messages, "\n")
// strings builder?
@@ -519,22 +495,20 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
}
// Create copy of chat body with standardized user role
- filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
+ filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// Add persona suffix to the last user message to indicate who the assistant should reply as
- if cfg.AutoTurn && !resume {
- filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona)
- }
bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(filteredMessages)),
Model: chatBody.Model,
Stream: chatBody.Stream,
}
for i, msg := range filteredMessages {
- if msg.Role == cfg.UserRole || i == 1 {
- bodyCopy.Messages[i] = msg
+ strippedMsg := *stripThinkingFromMsg(msg)
+ if strippedMsg.Role == cfg.UserRole || i == 1 {
+ bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "user"
} else {
- bodyCopy.Messages[i] = msg
+ bodyCopy.Messages[i] = strippedMsg
}
}
// Clean null/empty messages to prevent API issues
@@ -605,7 +579,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
messages := make([]string, len(filteredMessages))
for i, m := range filteredMessages {
- messages[i] = m.ToPrompt()
+ messages[i] = stripThinkingFromMsg(m).ToPrompt()
}
prompt := strings.Join(messages, "\n")
// strings builder?
@@ -718,21 +692,19 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
}
// Create copy of chat body with standardized user role
- filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages)
+ filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages)
// Add persona suffix to the last user message to indicate who the assistant should reply as
- if cfg.AutoTurn && !resume {
- filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona)
- }
bodyCopy := &models.ChatBody{
Messages: make([]models.RoleMsg, len(filteredMessages)),
Model: chatBody.Model,
Stream: chatBody.Stream,
}
for i, msg := range filteredMessages {
- bodyCopy.Messages[i] = msg
+ strippedMsg := *stripThinkingFromMsg(msg)
+ bodyCopy.Messages[i] = strippedMsg
// Standardize role if it's a user role
if bodyCopy.Messages[i].Role == cfg.UserRole {
- bodyCopy.Messages[i] = msg
+ bodyCopy.Messages[i] = strippedMsg
bodyCopy.Messages[i].Role = "user"
}
}