From eb44b1e4b244e5a93e7d465b14df39819d8dfaba Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Fri, 16 Jan 2026 16:53:19 +0300 Subject: Feat: impl attempt --- llm.go | 66 ++++++++++++++++++++++++++++++++++++++++++------------------------ 1 file changed, 42 insertions(+), 24 deletions(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index 5621ecf..5599d21 100644 --- a/llm.go +++ b/llm.go @@ -34,6 +34,24 @@ func ClearImageAttachment() { imageAttachmentPath = "" } +// filterMessagesForCurrentCharacter filters messages based on char-specific context. +// Returns filtered messages and the bot persona role (target character). +func filterMessagesForCurrentCharacter(messages []models.RoleMsg) ([]models.RoleMsg, string) { + if cfg == nil || !cfg.CharSpecificContextEnabled { + botPersona := cfg.AssistantRole + if cfg.WriteNextMsgAsCompletionAgent != "" { + botPersona = cfg.WriteNextMsgAsCompletionAgent + } + return messages, botPersona + } + botPersona := cfg.AssistantRole + if cfg.WriteNextMsgAsCompletionAgent != "" { + botPersona = cfg.WriteNextMsgAsCompletionAgent + } + filtered := filterMessagesForCharacter(messages, botPersona) + return filtered, botPersona +} + type ChunkParser interface { ParseChunk([]byte) (*models.TextChunk, error) FormMsg(msg, role string, cont bool) (io.Reader, error) @@ -113,6 +131,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro } if msg != "" { // otherwise let the bot to continue newMsg := models.RoleMsg{Role: role, Content: msg} + newMsg = processMessageTag(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } if !resume { @@ -136,17 +155,14 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro // add to chat body chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) } - messages := make([]string, len(chatBody.Messages)) - for i, m := range chatBody.Messages { + filteredMessages, botPersona := 
filterMessagesForCurrentCharacter(chatBody.Messages) + messages := make([]string, len(filteredMessages)) + for i, m := range filteredMessages { messages[i] = m.ToPrompt() } prompt := strings.Join(messages, "\n") // strings builder? if !resume { - botPersona := cfg.AssistantRole - if cfg.WriteNextMsgAsCompletionAgent != "" { - botPersona = cfg.WriteNextMsgAsCompletionAgent - } botMsgStart := "\n" + botPersona + ":\n" prompt += botMsgStart } @@ -270,6 +286,7 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { // Create a simple text message newMsg = models.NewRoleMsg(role, msg) } + newMsg = processMessageTag(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role, "content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages)) } @@ -291,12 +308,13 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { } } // openai /v1/chat does not support custom roles; needs to be user, assistant, system + filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages) bodyCopy := &models.ChatBody{ - Messages: make([]models.RoleMsg, len(chatBody.Messages)), + Messages: make([]models.RoleMsg, len(filteredMessages)), Model: chatBody.Model, Stream: chatBody.Stream, } - for i, msg := range chatBody.Messages { + for i, msg := range filteredMessages { if msg.Role == cfg.UserRole { bodyCopy.Messages[i] = msg bodyCopy.Messages[i].Role = "user" @@ -348,6 +366,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader logger.Debug("formmsg deepseekercompletion", "link", cfg.CurrentAPI) if msg != "" { // otherwise let the bot to continue newMsg := models.RoleMsg{Role: role, Content: msg} + newMsg = processMessageTag(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } if !resume { @@ -372,17 +391,14 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader 
// add to chat body chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) } - messages := make([]string, len(chatBody.Messages)) - for i, m := range chatBody.Messages { + filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) + messages := make([]string, len(filteredMessages)) + for i, m := range filteredMessages { messages[i] = m.ToPrompt() } prompt := strings.Join(messages, "\n") // strings builder? if !resume { - botPersona := cfg.AssistantRole - if cfg.WriteNextMsgAsCompletionAgent != "" { - botPersona = cfg.WriteNextMsgAsCompletionAgent - } botMsgStart := "\n" + botPersona + ":\n" prompt += botMsgStart } @@ -432,6 +448,7 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro logger.Debug("formmsg deepseekerchat", "link", cfg.CurrentAPI) if msg != "" { // otherwise let the bot continue newMsg := models.RoleMsg{Role: role, Content: msg} + newMsg = processMessageTag(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } if !resume { @@ -451,12 +468,13 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } } + filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages) bodyCopy := &models.ChatBody{ - Messages: make([]models.RoleMsg, len(chatBody.Messages)), + Messages: make([]models.RoleMsg, len(filteredMessages)), Model: chatBody.Model, Stream: chatBody.Stream, } - for i, msg := range chatBody.Messages { + for i, msg := range filteredMessages { if msg.Role == cfg.UserRole || i == 1 { bodyCopy.Messages[i] = msg bodyCopy.Messages[i].Role = "user" @@ -502,6 +520,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader logger.Debug("formmsg openroutercompletion", "link", cfg.CurrentAPI) if msg != "" { // otherwise let the bot to continue newMsg := models.RoleMsg{Role: role, Content: msg} + 
newMsg = processMessageTag(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } if !resume { @@ -525,17 +544,14 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader // add to chat body chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) } - messages := make([]string, len(chatBody.Messages)) - for i, m := range chatBody.Messages { + filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) + messages := make([]string, len(filteredMessages)) + for i, m := range filteredMessages { messages[i] = m.ToPrompt() } prompt := strings.Join(messages, "\n") // strings builder? if !resume { - botPersona := cfg.AssistantRole - if cfg.WriteNextMsgAsCompletionAgent != "" { - botPersona = cfg.WriteNextMsgAsCompletionAgent - } botMsgStart := "\n" + botPersona + ":\n" prompt += botMsgStart } @@ -619,6 +635,7 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro // Create a simple text message newMsg = models.NewRoleMsg(role, msg) } + newMsg = processMessageTag(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } if !resume { @@ -639,12 +656,13 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro } } // Create copy of chat body with standardized user role + filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages) bodyCopy := &models.ChatBody{ - Messages: make([]models.RoleMsg, len(chatBody.Messages)), + Messages: make([]models.RoleMsg, len(filteredMessages)), Model: chatBody.Model, Stream: chatBody.Stream, } - for i, msg := range chatBody.Messages { + for i, msg := range filteredMessages { bodyCopy.Messages[i] = msg // Standardize role if it's a user role if bodyCopy.Messages[i].Role == cfg.UserRole { -- cgit v1.2.3 From 8b162ef34f0755e2224c43499218def16d4b6845 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sat, 17 Jan 2026 11:42:35 +0300 Subject: Enha: change textview chat 
history based on current user persona --- llm.go | 1 - 1 file changed, 1 deletion(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index 5599d21..cd5a3fe 100644 --- a/llm.go +++ b/llm.go @@ -180,7 +180,6 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro } prompt = sb.String() } - logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, "msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData)) payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData, defaultLCPProps, chatBody.MakeStopSlice()) -- cgit v1.2.3 From 3a11210f52a850f84771e1642cafcc3027b85075 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sat, 31 Jan 2026 12:57:53 +0300 Subject: Enha: avoid recursion in llm calls --- llm.go | 6 ++++++ 1 file changed, 6 insertions(+) (limited to 'llm.go') diff --git a/llm.go b/llm.go index cd5a3fe..5bd7554 100644 --- a/llm.go +++ b/llm.go @@ -363,6 +363,9 @@ func (ds DeepSeekerCompletion) GetToken() string { func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader, error) { logger.Debug("formmsg deepseekercompletion", "link", cfg.CurrentAPI) + if err := deepseekModelValidator(); err != nil { + return nil, err + } if msg != "" { // otherwise let the bot to continue newMsg := models.RoleMsg{Role: role, Content: msg} newMsg = processMessageTag(newMsg) @@ -445,6 +448,9 @@ func (ds DeepSeekerChat) GetToken() string { func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { logger.Debug("formmsg deepseekerchat", "link", cfg.CurrentAPI) + if err := deepseekModelValidator(); err != nil { + return nil, err + } if msg != "" { // otherwise let the bot continue newMsg := models.RoleMsg{Role: role, Content: msg} newMsg = processMessageTag(newMsg) -- cgit v1.2.3 From 6f6a35459ef4de340c0c6825da20828e7f579207 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 1 Feb 2026 11:38:51 +0300 Subject: Chore: cleaning --- llm.go | 6 +++--- 1 
file changed, 3 insertions(+), 3 deletions(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index 5bd7554..7651a19 100644 --- a/llm.go +++ b/llm.go @@ -322,7 +322,7 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { } } // Clean null/empty messages to prevent API issues - bodyCopy.Messages = cleanNullMessages(bodyCopy.Messages) + bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages) req := models.OpenAIReq{ ChatBody: bodyCopy, Tools: nil, @@ -488,7 +488,7 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro } } // Clean null/empty messages to prevent API issues - bodyCopy.Messages = cleanNullMessages(bodyCopy.Messages) + bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages) dsBody := models.NewDSChatReq(*bodyCopy) data, err := json.Marshal(dsBody) if err != nil { @@ -676,7 +676,7 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro } } // Clean null/empty messages to prevent API issues - bodyCopy.Messages = cleanNullMessages(bodyCopy.Messages) + bodyCopy.Messages = consolidateAssistantMessages(bodyCopy.Messages) orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps) if cfg.ToolUse && !resume && role != cfg.ToolRole { orBody.Tools = baseTools // set tools to use -- cgit v1.2.3 From c1b04303ef91709e6a0f2ec93f5ae5a1dac610ce Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 1 Feb 2026 12:53:06 +0300 Subject: Enha: persona suffix for /chat endpoints --- llm.go | 51 ++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 48 insertions(+), 3 deletions(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index 7651a19..d6e4d13 100644 --- a/llm.go +++ b/llm.go @@ -13,6 +13,27 @@ var imageAttachmentPath string // Global variable to track image attachment for var lastImg string // for ctrl+j var RAGMsg = "Retrieved context for user's query:\n" +// addPersonaSuffixToLastUserMessage adds the persona suffix to the last user 
message +// to indicate to the assistant who it should reply as +func addPersonaSuffixToLastUserMessage(messages []models.RoleMsg, persona string) []models.RoleMsg { + if len(messages) == 0 { + return messages + } + + // Find the last user message to modify + for i := len(messages) - 1; i >= 0; i-- { + if messages[i].Role == cfg.UserRole || messages[i].Role == "user" { + // Create a copy of the message to avoid modifying the original + modifiedMsg := messages[i] + modifiedMsg.Content = modifiedMsg.Content + "\n" + persona + ":" + messages[i] = modifiedMsg + break + } + } + + return messages +} + // containsToolSysMsg checks if the toolSysMsg already exists in the chat body func containsToolSysMsg() bool { for _, msg := range chatBody.Messages { @@ -307,7 +328,15 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { } } // openai /v1/chat does not support custom roles; needs to be user, assistant, system - filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages) + filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) + + // Add persona suffix to the last user message to indicate who the assistant should reply as + if !resume && cfg.WriteNextMsgAsCompletionAgent != "" { + filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, cfg.WriteNextMsgAsCompletionAgent) + } else if !resume { + filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona) + } + bodyCopy := &models.ChatBody{ Messages: make([]models.RoleMsg, len(filteredMessages)), Model: chatBody.Model, @@ -473,7 +502,15 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } } - filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages) + filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) + + // Add persona suffix to the last user 
message to indicate who the assistant should reply as + if !resume && cfg.WriteNextMsgAsCompletionAgent != "" { + filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, cfg.WriteNextMsgAsCompletionAgent) + } else if !resume { + filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona) + } + bodyCopy := &models.ChatBody{ Messages: make([]models.RoleMsg, len(filteredMessages)), Model: chatBody.Model, @@ -661,7 +698,15 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro } } // Create copy of chat body with standardized user role - filteredMessages, _ := filterMessagesForCurrentCharacter(chatBody.Messages) + filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) + + // Add persona suffix to the last user message to indicate who the assistant should reply as + if !resume && cfg.WriteNextMsgAsCompletionAgent != "" { + filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, cfg.WriteNextMsgAsCompletionAgent) + } else if !resume { + filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona) + } + bodyCopy := &models.ChatBody{ Messages: make([]models.RoleMsg, len(filteredMessages)), Model: chatBody.Model, -- cgit v1.2.3 From e52e8ce2cc44b4e8cc950fe6811810db4142921d Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Mon, 2 Feb 2026 08:18:49 +0300 Subject: Enha: consolidate assistant messages only --- llm.go | 31 ++++++++++++++----------------- 1 file changed, 14 insertions(+), 17 deletions(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index d6e4d13..a4162b7 100644 --- a/llm.go +++ b/llm.go @@ -19,18 +19,19 @@ func addPersonaSuffixToLastUserMessage(messages []models.RoleMsg, persona string if len(messages) == 0 { return messages } - - // Find the last user message to modify - for i := len(messages) - 1; i >= 0; i-- { - if messages[i].Role == cfg.UserRole || messages[i].Role == "user" { - // Create a copy of the message to avoid 
modifying the original - modifiedMsg := messages[i] - modifiedMsg.Content = modifiedMsg.Content + "\n" + persona + ":" - messages[i] = modifiedMsg - break - } - } - + // // Find the last user message to modify + // for i := len(messages) - 1; i >= 0; i-- { + // if messages[i].Role == cfg.UserRole || messages[i].Role == "user" { + // // Create a copy of the message to avoid modifying the original + // modifiedMsg := messages[i] + // modifiedMsg.Content = modifiedMsg.Content + "\n" + persona + ":" + // messages[i] = modifiedMsg + // break + // } + // } + modifiedMsg := messages[len(messages)-1] + modifiedMsg.Content = modifiedMsg.Content + "\n" + persona + ":\n" + messages[len(messages)-1] = modifiedMsg return messages } @@ -329,14 +330,10 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { } // openai /v1/chat does not support custom roles; needs to be user, assistant, system filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) - // Add persona suffix to the last user message to indicate who the assistant should reply as - if !resume && cfg.WriteNextMsgAsCompletionAgent != "" { - filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, cfg.WriteNextMsgAsCompletionAgent) - } else if !resume { + if !resume { filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona) } - bodyCopy := &models.ChatBody{ Messages: make([]models.RoleMsg, len(filteredMessages)), Model: chatBody.Model, -- cgit v1.2.3 From 343e045095419522a388aa0aa7d66ec1eced1803 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Mon, 2 Feb 2026 08:23:05 +0300 Subject: Enha: role suffix for /chat only if AutoTurn is enabled --- llm.go | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index a4162b7..b7f28de 100644 --- a/llm.go +++ b/llm.go @@ -328,10 +328,10 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { 
logger.Debug("LCPChat: RAG message added to chat body", "role", ragMsg.Role, "rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages)) } } - // openai /v1/chat does not support custom roles; needs to be user, assistant, system filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) + // openai /v1/chat does not support custom roles; needs to be user, assistant, system // Add persona suffix to the last user message to indicate who the assistant should reply as - if !resume { + if cfg.AutoTurn && !resume { filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona) } bodyCopy := &models.ChatBody{ @@ -499,15 +499,12 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } } + // Create copy of chat body with standardized user role filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) - // Add persona suffix to the last user message to indicate who the assistant should reply as - if !resume && cfg.WriteNextMsgAsCompletionAgent != "" { - filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, cfg.WriteNextMsgAsCompletionAgent) - } else if !resume { + if cfg.AutoTurn && !resume { filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona) } - bodyCopy := &models.ChatBody{ Messages: make([]models.RoleMsg, len(filteredMessages)), Model: chatBody.Model, @@ -696,14 +693,10 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro } // Create copy of chat body with standardized user role filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) - // Add persona suffix to the last user message to indicate who the assistant should reply as - if !resume && cfg.WriteNextMsgAsCompletionAgent != "" { - filteredMessages = 
addPersonaSuffixToLastUserMessage(filteredMessages, cfg.WriteNextMsgAsCompletionAgent) - } else if !resume { + if cfg.AutoTurn && !resume { filteredMessages = addPersonaSuffixToLastUserMessage(filteredMessages, botPersona) } - bodyCopy := &models.ChatBody{ Messages: make([]models.RoleMsg, len(filteredMessages)), Model: chatBody.Model, -- cgit v1.2.3 From 76f14ce4a376bbbb99c79cc2090c067b5ba28484 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Tue, 3 Feb 2026 16:56:31 +0300 Subject: Enha: detailed error --- llm.go | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index b7f28de..e43cc71 100644 --- a/llm.go +++ b/llm.go @@ -204,7 +204,8 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro } logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, "msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData)) - payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData, defaultLCPProps, chatBody.MakeStopSlice()) + payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData, + defaultLCPProps, chatBody.MakeStopSliceExcluding(botPersona, listChatRoles())) data, err := json.Marshal(payload) if err != nil { logger.Error("failed to form a msg", "error", err) @@ -436,7 +437,8 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, "msg", msg, "resume", resume, "prompt", prompt) payload := models.NewDSCompletionReq(prompt, chatBody.Model, - defaultLCPProps["temp"], chatBody.MakeStopSlice()) + defaultLCPProps["temp"], + chatBody.MakeStopSliceExcluding(botPersona, listChatRoles())) data, err := json.Marshal(payload) if err != nil { logger.Error("failed to form a msg", "error", err) @@ -594,10 +596,11 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader if cfg.ThinkUse && !cfg.ToolUse { 
prompt += "" } - ss := chatBody.MakeStopSlice() + ss := chatBody.MakeStopSliceExcluding(botPersona, listChatRoles()) logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, "msg", msg, "resume", resume, "prompt", prompt, "stop_strings", ss) - payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt, defaultLCPProps, ss) + payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt, + defaultLCPProps, ss) data, err := json.Marshal(payload) if err != nil { logger.Error("failed to form a msg", "error", err) -- cgit v1.2.3 From 79861e7c2bc6f2ed95309ca6e83577ddc4e2c63a Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Wed, 4 Feb 2026 11:22:17 +0300 Subject: Enha: privateMessageResp with resume --- llm.go | 39 ++++++++++++++++++++++++--------------- 1 file changed, 24 insertions(+), 15 deletions(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index e43cc71..30fc0ec 100644 --- a/llm.go +++ b/llm.go @@ -138,7 +138,8 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro if localImageAttachmentPath != "" { imageURL, err := models.CreateImageURLFromPath(localImageAttachmentPath) if err != nil { - logger.Error("failed to create image URL from path for completion", "error", err, "path", localImageAttachmentPath) + logger.Error("failed to create image URL from path for completion", + "error", err, "path", localImageAttachmentPath) return nil, err } // Extract base64 part from data URL (e.g., "data:image/jpeg;base64,...") @@ -166,15 +167,16 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro logger.Error("failed to form a rag msg", "error", err) return nil, err } - logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) + logger.Debug("RAG response received", "response_len", len(ragResp), + "response_preview", ragResp[:min(len(ragResp), 100)]) // Use system role for RAG context to avoid conflicts with tool usage 
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} chatBody.Messages = append(chatBody.Messages, ragMsg) logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } } + // sending description of the tools and how to use them if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { - // add to chat body chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) } filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) @@ -310,7 +312,8 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { } newMsg = processMessageTag(newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) - logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role, "content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages)) + logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role, + "content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages)) } if !resume { // if rag - add as system message to avoid conflicts with tool usage @@ -322,11 +325,13 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { logger.Error("LCPChat: failed to form a rag msg", "error", err) return nil, err } - logger.Debug("LCPChat: RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) + logger.Debug("LCPChat: RAG response received", + "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) // Use system role for RAG context to avoid conflicts with tool usage ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} chatBody.Messages = append(chatBody.Messages, ragMsg) - logger.Debug("LCPChat: RAG message added to chat body", "role", ragMsg.Role, "rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages)) + 
logger.Debug("LCPChat: RAG message added to chat body", "role", ragMsg.Role, + "rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages)) } } filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) @@ -409,15 +414,16 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader logger.Error("DeepSeekerCompletion: failed to form a rag msg", "error", err) return nil, err } - logger.Debug("DeepSeekerCompletion: RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) + logger.Debug("DeepSeekerCompletion: RAG response received", + "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) // Use system role for RAG context to avoid conflicts with tool usage ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} chatBody.Messages = append(chatBody.Messages, ragMsg) logger.Debug("DeepSeekerCompletion: RAG message added to chat body", "message_count", len(chatBody.Messages)) } } + // sending description of the tools and how to use them if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { - // add to chat body chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) } filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) @@ -494,7 +500,8 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro logger.Error("failed to form a rag msg", "error", err) return nil, err } - logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) + logger.Debug("RAG response received", "response_len", len(ragResp), + "response_preview", ragResp[:min(len(ragResp), 100)]) // Use system role for RAG context to avoid conflicts with tool usage ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} chatBody.Messages = 
append(chatBody.Messages, ragMsg) @@ -571,15 +578,16 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader logger.Error("failed to form a rag msg", "error", err) return nil, err } - logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) + logger.Debug("RAG response received", "response_len", + len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) // Use system role for RAG context to avoid conflicts with tool usage ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} chatBody.Messages = append(chatBody.Messages, ragMsg) logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } } + // sending description of the tools and how to use them if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { - // add to chat body chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) } filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) @@ -596,11 +604,11 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader if cfg.ThinkUse && !cfg.ToolUse { prompt += "" } - ss := chatBody.MakeStopSliceExcluding(botPersona, listChatRoles()) + stopSlice := chatBody.MakeStopSliceExcluding(botPersona, listChatRoles()) logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, - "msg", msg, "resume", resume, "prompt", prompt, "stop_strings", ss) + "msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice) payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt, - defaultLCPProps, ss) + defaultLCPProps, stopSlice) data, err := json.Marshal(payload) if err != nil { logger.Error("failed to form a msg", "error", err) @@ -687,7 +695,8 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro logger.Error("failed to form a rag msg", "error", err) return nil, 
err } - logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) + logger.Debug("RAG response received", "response_len", len(ragResp), + "response_preview", ragResp[:min(len(ragResp), 100)]) // Use system role for RAG context to avoid conflicts with tool usage ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} chatBody.Messages = append(chatBody.Messages, ragMsg) -- cgit v1.2.3 From 7187df509fe9cc506695a1036b840e03eeb25cff Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Wed, 4 Feb 2026 12:47:54 +0300 Subject: Enha: stricter stop string --- llm.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index 30fc0ec..95de1d8 100644 --- a/llm.go +++ b/llm.go @@ -207,7 +207,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, "msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData)) payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData, - defaultLCPProps, chatBody.MakeStopSliceExcluding(botPersona, listChatRoles())) + defaultLCPProps, chatBody.MakeStopSliceExcluding("", listChatRoles())) data, err := json.Marshal(payload) if err != nil { logger.Error("failed to form a msg", "error", err) @@ -444,7 +444,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader "msg", msg, "resume", resume, "prompt", prompt) payload := models.NewDSCompletionReq(prompt, chatBody.Model, defaultLCPProps["temp"], - chatBody.MakeStopSliceExcluding(botPersona, listChatRoles())) + chatBody.MakeStopSliceExcluding("", listChatRoles())) data, err := json.Marshal(payload) if err != nil { logger.Error("failed to form a msg", "error", err) @@ -604,7 +604,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader if cfg.ThinkUse && !cfg.ToolUse { prompt += "" 
} - stopSlice := chatBody.MakeStopSliceExcluding(botPersona, listChatRoles()) + stopSlice := chatBody.MakeStopSliceExcluding("", listChatRoles()) logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse, "msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice) payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt, -- cgit v1.2.3 From 478a505869bf26b15dcbc77feb2c09c1f2ff4aac Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Fri, 6 Feb 2026 11:32:06 +0300 Subject: Enha: client stop string for completion only --- llm.go | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index 95de1d8..b2cd5e2 100644 --- a/llm.go +++ b/llm.go @@ -78,6 +78,7 @@ type ChunkParser interface { ParseChunk([]byte) (*models.TextChunk, error) FormMsg(msg, role string, cont bool) (io.Reader, error) GetToken() string + GetAPIType() models.APIType } func choseChunkParser() { @@ -127,6 +128,10 @@ type OpenRouterChat struct { Model string } +func (lcp LCPCompletion) GetAPIType() models.APIType { + return models.APITypeCompletion +} + func (lcp LCPCompletion) GetToken() string { return "" } @@ -233,7 +238,11 @@ func (lcp LCPCompletion) ParseChunk(data []byte) (*models.TextChunk, error) { return resp, nil } -func (op LCPChat) GetToken() string { +func (lcp LCPChat) GetAPIType() models.APIType { + return models.APITypeChat +} + +func (lcp LCPChat) GetToken() string { return "" } @@ -371,6 +380,10 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { } // deepseek +func (ds DeepSeekerCompletion) GetAPIType() models.APIType { + return models.APITypeCompletion +} + func (ds DeepSeekerCompletion) ParseChunk(data []byte) (*models.TextChunk, error) { llmchunk := models.DSCompletionResp{} if err := json.Unmarshal(data, &llmchunk); err != nil { @@ -453,6 +466,10 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader return 
bytes.NewReader(data), nil } +func (ds DeepSeekerChat) GetAPIType() models.APIType { + return models.APITypeChat +} + func (ds DeepSeekerChat) ParseChunk(data []byte) (*models.TextChunk, error) { llmchunk := models.DSChatStreamResp{} if err := json.Unmarshal(data, &llmchunk); err != nil { @@ -539,6 +556,10 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro } // openrouter +func (or OpenRouterCompletion) GetAPIType() models.APIType { + return models.APITypeCompletion +} + func (or OpenRouterCompletion) ParseChunk(data []byte) (*models.TextChunk, error) { llmchunk := models.OpenRouterCompletionResp{} if err := json.Unmarshal(data, &llmchunk); err != nil { @@ -618,6 +639,10 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader } // chat +func (or OpenRouterChat) GetAPIType() models.APIType { + return models.APITypeChat +} + func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) { llmchunk := models.OpenRouterChatResp{} if err := json.Unmarshal(data, &llmchunk); err != nil { -- cgit v1.2.3 From 4af866079c3f21eab12b02c3158567539ca40c50 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Fri, 6 Feb 2026 12:42:06 +0300 Subject: Chore: linter complaints --- llm.go | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index b2cd5e2..c77495e 100644 --- a/llm.go +++ b/llm.go @@ -159,7 +159,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro } if msg != "" { // otherwise let the bot to continue newMsg := models.RoleMsg{Role: role, Content: msg} - newMsg = processMessageTag(newMsg) + newMsg = *processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } if !resume { @@ -319,7 +319,7 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { // Create a simple text message newMsg = models.NewRoleMsg(role, msg) } - newMsg = processMessageTag(newMsg) + newMsg = 
*processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role, "content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages)) @@ -413,7 +413,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader } if msg != "" { // otherwise let the bot to continue newMsg := models.RoleMsg{Role: role, Content: msg} - newMsg = processMessageTag(newMsg) + newMsg = *processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } if !resume { @@ -504,7 +504,7 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro } if msg != "" { // otherwise let the bot continue newMsg := models.RoleMsg{Role: role, Content: msg} - newMsg = processMessageTag(newMsg) + newMsg = *processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } if !resume { @@ -586,7 +586,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader logger.Debug("formmsg openroutercompletion", "link", cfg.CurrentAPI) if msg != "" { // otherwise let the bot to continue newMsg := models.RoleMsg{Role: role, Content: msg} - newMsg = processMessageTag(newMsg) + newMsg = *processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } if !resume { @@ -707,7 +707,7 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro // Create a simple text message newMsg = models.NewRoleMsg(role, msg) } - newMsg = processMessageTag(newMsg) + newMsg = *processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } if !resume { -- cgit v1.2.3 From 1bf9e6eef72ec2eec7282b1554b41a0dc3d8d1b8 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 8 Feb 2026 21:50:03 +0300 Subject: Enha: extract first valid recipient from knownto --- llm.go | 199 +++++++++++++++++++++++++++++++---------------------------------- 1 file changed, 94 
insertions(+), 105 deletions(-) (limited to 'llm.go') diff --git a/llm.go b/llm.go index c77495e..734b4fd 100644 --- a/llm.go +++ b/llm.go @@ -59,17 +59,19 @@ func ClearImageAttachment() { // filterMessagesForCurrentCharacter filters messages based on char-specific context. // Returns filtered messages and the bot persona role (target character). func filterMessagesForCurrentCharacter(messages []models.RoleMsg) ([]models.RoleMsg, string) { - if cfg == nil || !cfg.CharSpecificContextEnabled { - botPersona := cfg.AssistantRole - if cfg.WriteNextMsgAsCompletionAgent != "" { - botPersona = cfg.WriteNextMsgAsCompletionAgent - } - return messages, botPersona - } botPersona := cfg.AssistantRole if cfg.WriteNextMsgAsCompletionAgent != "" { botPersona = cfg.WriteNextMsgAsCompletionAgent } + if cfg == nil || !cfg.CharSpecificContextEnabled { + return messages, botPersona + } + // get last message (written by user) and check if it has a tag + lm := messages[len(messages)-1] + recipient, ok := getValidKnowToRecipient(&lm) + if ok && recipient != "" { + botPersona = recipient + } filtered := filterMessagesForCharacter(messages, botPersona) return filtered, botPersona } @@ -162,23 +164,21 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro newMsg = *processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } - if !resume { - // if rag - add as system message to avoid conflicts with tool usage - if cfg.RAGEnabled { - um := chatBody.Messages[len(chatBody.Messages)-1].Content - logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) - ragResp, err := chatRagUse(um) - if err != nil { - logger.Error("failed to form a rag msg", "error", err) - return nil, err - } - logger.Debug("RAG response received", "response_len", len(ragResp), - "response_preview", ragResp[:min(len(ragResp), 100)]) - // Use system role for RAG context to avoid conflicts with tool usage - ragMsg := models.RoleMsg{Role: "system", Content: 
RAGMsg + ragResp} - chatBody.Messages = append(chatBody.Messages, ragMsg) - logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) + // if rag - add as system message to avoid conflicts with tool usage + if !resume && cfg.RAGEnabled { + um := chatBody.Messages[len(chatBody.Messages)-1].Content + logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) + ragResp, err := chatRagUse(um) + if err != nil { + logger.Error("failed to form a rag msg", "error", err) + return nil, err } + logger.Debug("RAG response received", "response_len", len(ragResp), + "response_preview", ragResp[:min(len(ragResp), 100)]) + // Use system role for RAG context to avoid conflicts with tool usage + ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} + chatBody.Messages = append(chatBody.Messages, ragMsg) + logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } // sending description of the tools and how to use them if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { @@ -324,24 +324,22 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role, "content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages)) } - if !resume { - // if rag - add as system message to avoid conflicts with tool usage - if cfg.RAGEnabled { - um := chatBody.Messages[len(chatBody.Messages)-1].Content - logger.Debug("LCPChat: RAG is enabled, preparing RAG context", "user_message", um) - ragResp, err := chatRagUse(um) - if err != nil { - logger.Error("LCPChat: failed to form a rag msg", "error", err) - return nil, err - } - logger.Debug("LCPChat: RAG response received", - "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) - // Use system role for RAG context to avoid conflicts with tool usage - ragMsg := models.RoleMsg{Role: "system", 
Content: RAGMsg + ragResp} - chatBody.Messages = append(chatBody.Messages, ragMsg) - logger.Debug("LCPChat: RAG message added to chat body", "role", ragMsg.Role, - "rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages)) + // if rag - add as system message to avoid conflicts with tool usage + if !resume && cfg.RAGEnabled { + um := chatBody.Messages[len(chatBody.Messages)-1].Content + logger.Debug("LCPChat: RAG is enabled, preparing RAG context", "user_message", um) + ragResp, err := chatRagUse(um) + if err != nil { + logger.Error("LCPChat: failed to form a rag msg", "error", err) + return nil, err } + logger.Debug("LCPChat: RAG response received", + "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) + // Use system role for RAG context to avoid conflicts with tool usage + ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} + chatBody.Messages = append(chatBody.Messages, ragMsg) + logger.Debug("LCPChat: RAG message added to chat body", "role", ragMsg.Role, + "rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages)) } filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) // openai /v1/chat does not support custom roles; needs to be user, assistant, system @@ -416,24 +414,21 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader newMsg = *processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } - if !resume { - // if rag - add as system message to avoid conflicts with tool usage - // TODO: perhaps RAG should be a func/tool call instead? 
- if cfg.RAGEnabled { - um := chatBody.Messages[len(chatBody.Messages)-1].Content - logger.Debug("DeepSeekerCompletion: RAG is enabled, preparing RAG context", "user_message", um) - ragResp, err := chatRagUse(um) - if err != nil { - logger.Error("DeepSeekerCompletion: failed to form a rag msg", "error", err) - return nil, err - } - logger.Debug("DeepSeekerCompletion: RAG response received", - "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) - // Use system role for RAG context to avoid conflicts with tool usage - ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} - chatBody.Messages = append(chatBody.Messages, ragMsg) - logger.Debug("DeepSeekerCompletion: RAG message added to chat body", "message_count", len(chatBody.Messages)) + // if rag - add as system message to avoid conflicts with tool usage + if !resume && cfg.RAGEnabled { + um := chatBody.Messages[len(chatBody.Messages)-1].Content + logger.Debug("DeepSeekerCompletion: RAG is enabled, preparing RAG context", "user_message", um) + ragResp, err := chatRagUse(um) + if err != nil { + logger.Error("DeepSeekerCompletion: failed to form a rag msg", "error", err) + return nil, err } + logger.Debug("DeepSeekerCompletion: RAG response received", + "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) + // Use system role for RAG context to avoid conflicts with tool usage + ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} + chatBody.Messages = append(chatBody.Messages, ragMsg) + logger.Debug("DeepSeekerCompletion: RAG message added to chat body", "message_count", len(chatBody.Messages)) } // sending description of the tools and how to use them if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { @@ -507,23 +502,21 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro newMsg = *processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } - if 
!resume { - // if rag - add as system message to avoid conflicts with tool usage - if cfg.RAGEnabled { - um := chatBody.Messages[len(chatBody.Messages)-1].Content - logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) - ragResp, err := chatRagUse(um) - if err != nil { - logger.Error("failed to form a rag msg", "error", err) - return nil, err - } - logger.Debug("RAG response received", "response_len", len(ragResp), - "response_preview", ragResp[:min(len(ragResp), 100)]) - // Use system role for RAG context to avoid conflicts with tool usage - ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} - chatBody.Messages = append(chatBody.Messages, ragMsg) - logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) + // if rag - add as system message to avoid conflicts with tool usage + if !resume && cfg.RAGEnabled { + um := chatBody.Messages[len(chatBody.Messages)-1].Content + logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) + ragResp, err := chatRagUse(um) + if err != nil { + logger.Error("failed to form a rag msg", "error", err) + return nil, err } + logger.Debug("RAG response received", "response_len", len(ragResp), + "response_preview", ragResp[:min(len(ragResp), 100)]) + // Use system role for RAG context to avoid conflicts with tool usage + ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} + chatBody.Messages = append(chatBody.Messages, ragMsg) + logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } // Create copy of chat body with standardized user role filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) @@ -589,23 +582,21 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader newMsg = *processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } - if !resume { - // if rag - add as system message to avoid conflicts with tool usage - if 
cfg.RAGEnabled { - um := chatBody.Messages[len(chatBody.Messages)-1].Content - logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) - ragResp, err := chatRagUse(um) - if err != nil { - logger.Error("failed to form a rag msg", "error", err) - return nil, err - } - logger.Debug("RAG response received", "response_len", - len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) - // Use system role for RAG context to avoid conflicts with tool usage - ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} - chatBody.Messages = append(chatBody.Messages, ragMsg) - logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) + // if rag - add as system message to avoid conflicts with tool usage + if !resume && cfg.RAGEnabled { + um := chatBody.Messages[len(chatBody.Messages)-1].Content + logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) + ragResp, err := chatRagUse(um) + if err != nil { + logger.Error("failed to form a rag msg", "error", err) + return nil, err } + logger.Debug("RAG response received", "response_len", + len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)]) + // Use system role for RAG context to avoid conflicts with tool usage + ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} + chatBody.Messages = append(chatBody.Messages, ragMsg) + logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } // sending description of the tools and how to use them if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() { @@ -710,23 +701,21 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro newMsg = *processMessageTag(&newMsg) chatBody.Messages = append(chatBody.Messages, newMsg) } - if !resume { - // if rag - add as system message to avoid conflicts with tool usage - if cfg.RAGEnabled { - um := chatBody.Messages[len(chatBody.Messages)-1].Content - logger.Debug("RAG 
is enabled, preparing RAG context", "user_message", um) - ragResp, err := chatRagUse(um) - if err != nil { - logger.Error("failed to form a rag msg", "error", err) - return nil, err - } - logger.Debug("RAG response received", "response_len", len(ragResp), - "response_preview", ragResp[:min(len(ragResp), 100)]) - // Use system role for RAG context to avoid conflicts with tool usage - ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} - chatBody.Messages = append(chatBody.Messages, ragMsg) - logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) + // if rag - add as system message to avoid conflicts with tool usage + if !resume && cfg.RAGEnabled { + um := chatBody.Messages[len(chatBody.Messages)-1].Content + logger.Debug("RAG is enabled, preparing RAG context", "user_message", um) + ragResp, err := chatRagUse(um) + if err != nil { + logger.Error("failed to form a rag msg", "error", err) + return nil, err } + logger.Debug("RAG response received", "response_len", len(ragResp), + "response_preview", ragResp[:min(len(ragResp), 100)]) + // Use system role for RAG context to avoid conflicts with tool usage + ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp} + chatBody.Messages = append(chatBody.Messages, ragMsg) + logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } // Create copy of chat body with standardized user role filteredMessages, botPersona := filterMessagesForCurrentCharacter(chatBody.Messages) -- cgit v1.2.3