summary refs log tree commit diff
diff options
context:
space:
mode:
authorGrail Finder <wohilas@gmail.com>2025-12-10 16:02:16 +0300
committerGrail Finder <wohilas@gmail.com>2025-12-10 16:02:16 +0300
commitdaa8af2ce2cac56ddc98499553eb7a6a5e21857f (patch)
treeb4fa72cc87c5ff4b29679bf2691f2f931e0b9111
parentad36d1c3e0b545c3e0517ec384087075ad77f63e (diff)
Fix: rag on regen
-rw-r--r--bot.go28
-rw-r--r--llm.go59
2 files changed, 70 insertions, 17 deletions
diff --git a/bot.go b/bot.go
index 5c8ca1e..4ee0546 100644
--- a/bot.go
+++ b/bot.go
@@ -412,8 +412,10 @@ func sendMsgToLLM(body io.Reader) {
}
func chatRagUse(qText string) (string, error) {
+ logger.Debug("Starting RAG query", "original_query", qText)
tokenizer, err := english.NewSentenceTokenizer(nil)
if err != nil {
+ logger.Error("failed to create sentence tokenizer", "error", err)
return "", err
}
// this where llm should find the questions in text and ask them
@@ -421,14 +423,24 @@ func chatRagUse(qText string) (string, error) {
questions := make([]string, len(questionsS))
for i, q := range questionsS {
questions[i] = q.Text
+ logger.Debug("RAG question extracted", "index", i, "question", q.Text)
}
+
+ if len(questions) == 0 {
+ logger.Warn("No questions extracted from query text", "query", qText)
+ return "No related results from RAG vector storage.", nil
+ }
+
respVecs := []models.VectorRow{}
for i, q := range questions {
+ logger.Debug("Processing RAG question", "index", i, "question", q)
emb, err := ragger.LineToVector(q)
if err != nil {
- logger.Error("failed to get embs", "error", err, "index", i, "question", q)
+ logger.Error("failed to get embeddings for RAG", "error", err, "index", i, "question", q)
continue
}
+ logger.Debug("Got embeddings for question", "index", i, "question_len", len(q), "embedding_len", len(emb))
+
// Create EmbeddingResp struct for the search
embeddingResp := &models.EmbeddingResp{
Embedding: emb,
@@ -436,21 +448,29 @@ func chatRagUse(qText string) (string, error) {
}
vecs, err := ragger.SearchEmb(embeddingResp)
if err != nil {
- logger.Error("failed to query embs", "error", err, "index", i, "question", q)
+ logger.Error("failed to query embeddings in RAG", "error", err, "index", i, "question", q)
continue
}
+ logger.Debug("RAG search returned vectors", "index", i, "question", q, "vector_count", len(vecs))
respVecs = append(respVecs, vecs...)
}
+
// get raw text
resps := []string{}
- logger.Debug("rag query resp", "vecs len", len(respVecs))
+ logger.Debug("RAG query final results", "total_vecs_found", len(respVecs))
for _, rv := range respVecs {
resps = append(resps, rv.RawText)
+ logger.Debug("RAG result", "slug", rv.Slug, "filename", rv.FileName, "raw_text_len", len(rv.RawText))
}
+
if len(resps) == 0 {
+ logger.Info("No RAG results found for query", "original_query", qText, "question_count", len(questions))
return "No related results from RAG vector storage.", nil
}
- return strings.Join(resps, "\n"), nil
+
+ result := strings.Join(resps, "\n")
+ logger.Debug("RAG query completed", "result_len", len(result), "response_count", len(resps))
+ return result, nil
}
func roleToIcon(role string) string {
diff --git a/llm.go b/llm.go
index d9900de..4072197 100644
--- a/llm.go
+++ b/llm.go
@@ -85,7 +85,6 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
logger.Debug("formmsg lcpcompletion", "link", cfg.CurrentAPI)
localImageAttachmentPath := imageAttachmentPath
var multimodalData []string
-
if localImageAttachmentPath != "" {
imageURL, err := models.CreateImageURLFromPath(localImageAttachmentPath)
if err != nil {
@@ -102,20 +101,25 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
}
imageAttachmentPath = "" // Clear the attachment after use
}
-
if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg)
+ }
+ if !resume {
// if rag - add as system message to avoid conflicts with tool usage
if cfg.RAGEnabled {
- ragResp, err := chatRagUse(newMsg.Content)
+ um := chatBody.Messages[len(chatBody.Messages)-1].Content
+ logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
+ ragResp, err := chatRagUse(um)
if err != nil {
logger.Error("failed to form a rag msg", "error", err)
return nil, err
}
+ logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
+ logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
}
}
if cfg.ToolUse && !resume {
@@ -146,7 +150,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
var sb strings.Builder
sb.WriteString(prompt)
for range multimodalData {
- sb.WriteString(" <__media__>") // llama.cpp default multimodal marker
+ sb.WriteString(" <__media__>") // llama.cpp default multimodal marker
}
prompt = sb.String()
}
@@ -258,18 +262,22 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
}
chatBody.Messages = append(chatBody.Messages, newMsg)
logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role, "content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages))
-
+ }
+ if !resume {
// if rag - add as system message to avoid conflicts with tool usage
if cfg.RAGEnabled {
- ragResp, err := chatRagUse(newMsg.Content)
+ um := chatBody.Messages[len(chatBody.Messages)-1].Content
+ logger.Debug("LCPChat: RAG is enabled, preparing RAG context", "user_message", um)
+ ragResp, err := chatRagUse(um)
if err != nil {
- logger.Error("failed to form a rag msg", "error", err)
+ logger.Error("LCPChat: failed to form a rag msg", "error", err)
return nil, err
}
+ logger.Debug("LCPChat: RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
- logger.Debug("LCPChat FormMsg: added RAG message to chatBody", "role", ragMsg.Role, "rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages))
+ logger.Debug("LCPChat: RAG message added to chat body", "role", ragMsg.Role, "rag_content_len", len(ragMsg.Content), "message_count_after_rag", len(chatBody.Messages))
}
}
// openai /v1/chat does not support custom roles; needs to be user, assistant, system
@@ -331,16 +339,23 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg)
+ }
+ if !resume {
// if rag - add as system message to avoid conflicts with tool usage
+ // TODO: perhaps RAG should be a func/tool call instead?
if cfg.RAGEnabled {
- ragResp, err := chatRagUse(newMsg.Content)
+ um := chatBody.Messages[len(chatBody.Messages)-1].Content
+ logger.Debug("DeepSeekerCompletion: RAG is enabled, preparing RAG context", "user_message", um)
+ ragResp, err := chatRagUse(um)
if err != nil {
- logger.Error("failed to form a rag msg", "error", err)
+ logger.Error("DeepSeekerCompletion: failed to form a rag msg", "error", err)
return nil, err
}
+ logger.Debug("DeepSeekerCompletion: RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
+ logger.Debug("DeepSeekerCompletion: RAG message added to chat body", "message_count", len(chatBody.Messages))
}
}
if cfg.ToolUse && !resume {
@@ -413,16 +428,22 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
if msg != "" { // otherwise let the bot continue
newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg)
+ }
+ if !resume {
// if rag - add as system message to avoid conflicts with tool usage
if cfg.RAGEnabled {
- ragResp, err := chatRagUse(newMsg.Content)
+ um := chatBody.Messages[len(chatBody.Messages)-1].Content
+ logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
+ ragResp, err := chatRagUse(um)
if err != nil {
logger.Error("failed to form a rag msg", "error", err)
return nil, err
}
+ logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
+ logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
}
}
bodyCopy := &models.ChatBody{
@@ -477,16 +498,22 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg)
+ }
+ if !resume {
// if rag - add as system message to avoid conflicts with tool usage
if cfg.RAGEnabled {
- ragResp, err := chatRagUse(newMsg.Content)
+ um := chatBody.Messages[len(chatBody.Messages)-1].Content
+ logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
+ ragResp, err := chatRagUse(um)
if err != nil {
logger.Error("failed to form a rag msg", "error", err)
return nil, err
}
+ logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
+ logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
}
}
if cfg.ToolUse && !resume {
@@ -588,16 +615,22 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
newMsg = models.NewRoleMsg(role, msg)
}
chatBody.Messages = append(chatBody.Messages, newMsg)
+ }
+ if !resume {
// if rag - add as system message to avoid conflicts with tool usage
if cfg.RAGEnabled {
- ragResp, err := chatRagUse(newMsg.Content)
+ um := chatBody.Messages[len(chatBody.Messages)-1].Content
+ logger.Debug("RAG is enabled, preparing RAG context", "user_message", um)
+ ragResp, err := chatRagUse(um)
if err != nil {
logger.Error("failed to form a rag msg", "error", err)
return nil, err
}
+ logger.Debug("RAG response received", "response_len", len(ragResp), "response_preview", ragResp[:min(len(ragResp), 100)])
// Use system role for RAG context to avoid conflicts with tool usage
ragMsg := models.RoleMsg{Role: "system", Content: RAGMsg + ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
+ logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
}
}
// Create copy of chat body with standardized user role