| author | Grail Finder <wohilas@gmail.com> | 2025-12-28 00:06:09 +0300 |
|---|---|---|
| committer | Grail Finder <wohilas@gmail.com> | 2025-12-28 00:06:09 +0300 |
| commit | cae7dea2e7c149edd055b542a2a08b53af8a0ac8 | |
| tree | 5e387c1e6197b91b945fbff01218df45cad86ca2 | |
| parent | 29becaa8c83c8dff8af7623ca71e8021319887ab | |
| parent | 9d91685e9adde94d20313fb405c4301b4dd59a75 | |
Merge branch 'master' into doc/tutorial
| -rw-r--r-- | bot.go | 38 |
| -rw-r--r-- | config.example.toml | 1 |
| -rw-r--r-- | config/config.go | 1 |
| -rw-r--r-- | helpfuncs.go | 3 |
| -rw-r--r-- | llm.go | 21 |
| -rw-r--r-- | main.go | 2 |
| -rw-r--r-- | props_table.go | 3 |
| -rw-r--r-- | tables.go | 19 |
| -rw-r--r-- | tools.go | 13 |
| -rw-r--r-- | tui.go | 5 |
10 files changed, 82 insertions, 24 deletions
diff --git a/bot.go b/bot.go
@@ -88,6 +88,10 @@ func cleanNullMessages(messages []models.RoleMsg) []models.RoleMsg {
 }
 
 func cleanToolCalls(messages []models.RoleMsg) []models.RoleMsg {
+	// If AutoCleanToolCallsFromCtx is false, keep tool call messages in context
+	if cfg != nil && !cfg.AutoCleanToolCallsFromCtx {
+		return consolidateConsecutiveAssistantMessages(messages)
+	}
 	cleaned := make([]models.RoleMsg, 0, len(messages))
 	for i, msg := range messages {
 		// recognize the message as the tool call and remove it
@@ -731,7 +735,7 @@ func cleanChatBody() {
 	for i, msg := range chatBody.Messages {
 		logger.Debug("cleanChatBody: before clean", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID)
 	}
-	// TODO: consider case where we keep tool requests
+	// Tool request cleaning is now configurable via AutoCleanToolCallsFromCtx (default false)
 	// /completion msg where part meant for user and other part tool call
 	chatBody.Messages = cleanToolCalls(chatBody.Messages)
 	chatBody.Messages = cleanNullMessages(chatBody.Messages)
@@ -1029,6 +1033,38 @@ func refreshLocalModelsIfEmpty() {
 	localModelsMu.Unlock()
 }
 
+func summarizeAndStartNewChat() {
+	if len(chatBody.Messages) == 0 {
+		_ = notifyUser("info", "No chat history to summarize")
+		return
+	}
+	_ = notifyUser("info", "Summarizing chat history...")
+	// Call the summarize_chat tool via agent
+	summaryBytes := callToolWithAgent("summarize_chat", map[string]string{})
+	summary := string(summaryBytes)
+	if summary == "" {
+		_ = notifyUser("error", "Failed to generate summary")
+		return
+	}
+	// Start a new chat
+	startNewChat()
+	// Inject summary as a tool call response
+	toolMsg := models.RoleMsg{
+		Role:       cfg.ToolRole,
+		Content:    summary,
+		ToolCallID: "",
+	}
+	chatBody.Messages = append(chatBody.Messages, toolMsg)
+	// Update UI
+	textView.SetText(chatToText(cfg.ShowSys))
+	colorText()
+	// Update storage
+	if err := updateStorageChat(activeChatName, chatBody.Messages); err != nil {
+		logger.Warn("failed to update storage after injecting summary", "error", err)
+	}
+	_ = notifyUser("info", "Chat summarized and new chat started with summary as tool response")
+}
+
 func init() {
 	var err error
 	cfg, err = config.LoadConfig("config.toml")
diff --git a/config.example.toml b/config.example.toml
index 113b7ea..594e4da 100644
--- a/config.example.toml
+++ b/config.example.toml
@@ -18,6 +18,7 @@ ToolRole = "tool"
 AssistantRole = "assistant"
 SysDir = "sysprompts"
 ChunkLimit = 100000
+# AutoCleanToolCallsFromCtx = false
 # rag settings
 RAGBatchSize = 1
 RAGWordLimit = 80
diff --git a/config/config.go b/config/config.go
index 5b7cc35..112986b 100644
--- a/config/config.go
+++ b/config/config.go
@@ -31,6 +31,7 @@ type Config struct {
 	WriteNextMsgAs string
 	WriteNextMsgAsCompletionAgent string
 	SkipLLMResp bool
+	AutoCleanToolCallsFromCtx bool `toml:"AutoCleanToolCallsFromCtx"`
 	// embeddings
 	RAGEnabled bool `toml:"RAGEnabled"`
 	EmbedURL string `toml:"EmbedURL"`
diff --git a/helpfuncs.go b/helpfuncs.go
index ff4d806..0710fe8 100644
--- a/helpfuncs.go
+++ b/helpfuncs.go
@@ -227,7 +227,6 @@ func makeStatusLine() string {
 	} else {
 		imageInfo = ""
 	}
-
 	// Add shell mode status to status line
 	var shellModeInfo string
 	if shellMode {
@@ -235,9 +234,9 @@ func makeStatusLine() string {
 	} else {
 		shellModeInfo = ""
 	}
-
 	statusLine := fmt.Sprintf(indexLineCompletion, botRespMode, activeChatName,
-		cfg.ToolUse, chatBody.Model, cfg.SkipLLMResp, cfg.CurrentAPI, cfg.ThinkUse,
+		cfg.ToolUse, chatBody.Model, cfg.SkipLLMResp, cfg.CurrentAPI,
 		isRecording, persona, botPersona, injectRole)
 	return statusLine + imageInfo + shellModeInfo
 }
diff --git a/llm.go b/llm.go
@@ -13,6 +13,16 @@ var imageAttachmentPath string // Global variable to track image attachment for
 var lastImg string // for ctrl+j
 var RAGMsg = "Retrieved context for user's query:\n"
 
+// containsToolSysMsg checks if the toolSysMsg already exists in the chat body
+func containsToolSysMsg() bool {
+	for _, msg := range chatBody.Messages {
+		if msg.Role == cfg.ToolRole && msg.Content == toolSysMsg {
+			return true
+		}
+	}
+	return false
+}
+
 // SetImageAttachment sets an image to be attached to the next message sent to the LLM
 func SetImageAttachment(imagePath string) {
 	imageAttachmentPath = imagePath
@@ -122,7 +132,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
 			logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
 		}
 	}
-	if cfg.ToolUse && !resume && role == cfg.UserRole {
+	if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
 		// add to chat body
 		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
 	}
@@ -358,7 +368,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
 			logger.Debug("DeepSeekerCompletion: RAG message added to chat body", "message_count", len(chatBody.Messages))
 		}
 	}
-	if cfg.ToolUse && !resume && role == cfg.UserRole {
+	if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
 		// add to chat body
 		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
 	}
@@ -420,11 +430,6 @@ func (ds DeepSeekerChat) GetToken() string {
 
 func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
 	logger.Debug("formmsg deepseekerchat", "link", cfg.CurrentAPI)
-	if cfg.ToolUse && !resume && role == cfg.UserRole {
-		// prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
-		// add to chat body
-		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
-	}
 	if msg != "" { // otherwise let the bot continue
 		newMsg := models.RoleMsg{Role: role, Content: msg}
 		chatBody.Messages = append(chatBody.Messages, newMsg)
@@ -516,7 +521,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
 			logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages))
 		}
 	}
-	if cfg.ToolUse && !resume && role == cfg.UserRole {
+	if cfg.ToolUse && !resume && role == cfg.UserRole && !containsToolSysMsg() {
 		// add to chat body
 		chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
 	}
diff --git a/main.go b/main.go
@@ -18,7 +18,7 @@ var (
 	currentLocalModelIndex = 0 // Index to track current llama.cpp model
 	shellMode = false
 	// indexLine = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | card's char: [orange:-:b]%s[-:-:-] (ctrl+s) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | ThinkUse: [orange:-:b]%v[-:-:-] (ctrl+p) | Log Level: [orange:-:b]%v[-:-:-] (ctrl+p) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q)"
-	indexLineCompletion = "F12 to show keys help | bot responding: [orange:-:b]%v[-:-:-] (F6) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | Insert <think>: [orange:-:b]%v[-:-:-] (ctrl+p) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | Bot will write as [orange:-:b]%s[-:-:-] (ctrl+x) | role_inject [orange:-:b]%v[-:-:-]"
+	indexLineCompletion = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI: [orange:-:b]%s[-:-:-] (ctrl+v) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | Bot will write as [orange:-:b]%s[-:-:-] (ctrl+x) | role_inject [orange:-:b]%v[-:-:-]"
 	focusSwitcher = map[tview.Primitive]tview.Primitive{}
 )
 
diff --git a/props_table.go b/props_table.go
index 0c49056..d86e0b4 100644
--- a/props_table.go
+++ b/props_table.go
@@ -129,6 +129,9 @@ func makePropsTable(props map[string]float32) *tview.Table {
 	addCheckboxRow("TTS Enabled", cfg.TTS_ENABLED, func(checked bool) {
 		cfg.TTS_ENABLED = checked
 	})
+	addCheckboxRow("Auto clean tool calls from context", cfg.AutoCleanToolCallsFromCtx, func(checked bool) {
+		cfg.AutoCleanToolCallsFromCtx = checked
+	})
 	// Add dropdowns
 	logLevels := []string{"Debug", "Info", "Warn"}
 	addListPopupRow("Set log level", logLevels, GetLogLevel(), func(option string) {
diff --git a/tables.go b/tables.go
@@ -23,12 +23,10 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
 		chatList[i] = name
 		i++
 	}
-
 	// Add 1 extra row for header
 	rows, cols := len(chatMap)+1, len(actions)+4 // +2 for name, +2 for timestamps
 	chatActTable := tview.NewTable().
 		SetBorders(true)
-
 	// Add header row (row 0)
 	for c := 0; c < cols; c++ {
 		color := tcell.ColorWhite
@@ -52,7 +50,7 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
 			SetAlign(tview.AlignCenter).
 			SetAttributes(tcell.AttrBold))
 	}
-
+	previewLen := 100
 	// Add data rows (starting from row 1)
 	for r := 0; r < rows-1; r++ { // rows-1 because we added a header row
 		for c := 0; c < cols; c++ {
@@ -65,8 +63,11 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
 					SetTextColor(color).
 					SetAlign(tview.AlignCenter))
 			case 1:
+				if len(chatMap[chatList[r]].Msgs) < 100 {
+					previewLen = len(chatMap[chatList[r]].Msgs)
+				}
 				chatActTable.SetCell(r+1, c, // +1 to account for header row
-					tview.NewTableCell(chatMap[chatList[r]].Msgs[len(chatMap[chatList[r]].Msgs)-30:]).
+					tview.NewTableCell(chatMap[chatList[r]].Msgs[len(chatMap[chatList[r]].Msgs)-previewLen:]).
 						SetSelectable(false).
 						SetTextColor(color).
 						SetAlign(tview.AlignCenter))
@@ -87,8 +88,8 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
 			default:
 				chatActTable.SetCell(r+1, c, // +1 to account for header row
 					tview.NewTableCell(actions[c-4]). // Adjusted offset to account for 2 new timestamp columns
-						SetTextColor(color).
-						SetAlign(tview.AlignCenter))
+					SetTextColor(color).
+					SetAlign(tview.AlignCenter))
 			}
 		}
 	}
@@ -104,7 +105,6 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
 			chatActTable.Select(1, column) // Move selection to first data row
 			return
 		}
-
 		tc := chatActTable.GetCell(row, column)
 		tc.SetTextColor(tcell.ColorRed)
 		chatActTable.SetSelectable(false, false)
@@ -443,9 +443,7 @@ func makeLoadedRAGTable(fileList []string) *tview.Flex {
 			}
 			return
 		}
-
 		tc := fileTable.GetCell(row, column)
-
 		// Check if the selected row is the exit row (row 0) - do this first to avoid index issues
 		if row == 0 {
 			pages.RemovePage(RAGLoadedPage)
@@ -537,7 +535,6 @@ func makeAgentTable(agentList []string) *tview.Table {
 			}
 			return
 		}
-
 		tc := chatActTable.GetCell(row, column)
 		selected := agentList[row]
 		// notification := fmt.Sprintf("chat: %s; action: %s", selectedChat, tc.Text)
@@ -634,7 +631,6 @@ func makeCodeBlockTable(codeBlocks []string) *tview.Table {
 			}
 			return
 		}
-
 		tc := table.GetCell(row, column)
 		selected := codeBlocks[row]
 		// notification := fmt.Sprintf("chat: %s; action: %s", selectedChat, tc.Text)
@@ -706,7 +702,6 @@ func makeImportChatTable(filenames []string) *tview.Table {
 			}
 			return
 		}
-
 		tc := chatActTable.GetCell(row, column)
 		selected := filenames[row]
 		// notification := fmt.Sprintf("chat: %s; action: %s", selectedChat, tc.Text)
diff --git a/tools.go b/tools.go
@@ -129,6 +129,7 @@ After that you are free to respond to the user.
 `
 	webSearchSysPrompt = `Summarize the web search results, extracting key information and presenting a concise answer. Provide sources and URLs where relevant.`
 	readURLSysPrompt = `Extract and summarize the content from the webpage. Provide key information, main points, and any relevant details.`
+	summarySysPrompt = `Please provide a concise summary of the following conversation. Focus on key points, decisions, and actions. Provide only the summary, no additional commentary.`
 	basicCard = &models.CharCard{
 		SysPrompt: basicSysMsg,
 		FirstMsg: defaultFirstMsg,
@@ -178,6 +179,8 @@ func registerWebAgents() {
 		agent.Register("websearch", agent.NewWebAgentB(client, webSearchSysPrompt))
 		// Register read_url agent
 		agent.Register("read_url", agent.NewWebAgentB(client, readURLSysPrompt))
+		// Register summarize_chat agent
+		agent.Register("summarize_chat", agent.NewWebAgentB(client, summarySysPrompt))
 	})
 }
 
@@ -864,6 +867,15 @@ func isCommandAllowed(command string) bool {
 	return allowedCommands[command]
 }
 
+func summarizeChat(args map[string]string) []byte {
+	if len(chatBody.Messages) == 0 {
+		return []byte("No chat history to summarize.")
+	}
+	// Format chat history for the agent
+	chatText := chatToText(true) // include system and tool messages
+	return []byte(chatText)
+}
+
 type fnSig func(map[string]string) []byte
 
 var fnMap = map[string]fnSig{
@@ -884,6 +896,7 @@ var fnMap = map[string]fnSig{
 	"todo_read": todoRead,
 	"todo_update": todoUpdate,
 	"todo_delete": todoDelete,
+	"summarize_chat": summarizeChat,
 }
 
 // callToolWithAgent calls the tool and applies any registered agent.
diff --git a/tui.go b/tui.go
@@ -88,6 +88,7 @@ var (
 [yellow]Ctrl+q[white]: cycle through mentioned chars in chat, to pick persona to send next msg as
 [yellow]Ctrl+x[white]: cycle through mentioned chars in chat, to pick persona to send next msg as (for llm)
 [yellow]Alt+1[white]: toggle shell mode (execute commands locally)
+[yellow]Alt+3[white]: summarize chat history and start new chat with summary as tool response
 [yellow]Alt+4[white]: edit msg role
 [yellow]Alt+5[white]: toggle system and tool messages display
 [yellow]Alt+6[white]: toggle status line visibility
@@ -779,6 +780,10 @@ func init() {
 			textView.SetText(chatToText(cfg.ShowSys))
 			colorText()
 		}
+		if event.Key() == tcell.KeyRune && event.Rune() == '3' && event.Modifiers()&tcell.ModAlt != 0 {
+			go summarizeAndStartNewChat()
+			return nil
+		}
 		if event.Key() == tcell.KeyRune && event.Rune() == '6' && event.Modifiers()&tcell.ModAlt != 0 {
 			// toggle status line visibility
 			if name, _ := pages.GetFrontPage(); name != "main" {
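The merge introduces the `AutoCleanToolCallsFromCtx` switch (wired through `config/config.go`, `config.example.toml`, and the "Auto clean tool calls from context" checkbox in the props table) and leaves it off by default, so tool-call messages now stay in the chat context unless cleanup is explicitly re-enabled. A minimal config sketch for opting back in, assuming a user-edited `config.toml` with the key name taken from the `toml` tag above:

```toml
# config.toml — turn automatic tool-call cleanup back on.
# With the default of false, cleanToolCalls keeps tool messages and only
# consolidates consecutive assistant messages.
AutoCleanToolCallsFromCtx = true
```

The other half of the merge builds on the new summary prompt in tools.go: Alt+3 calls summarizeAndStartNewChat, which routes the whole history through the summarize_chat tool/agent pair and seeds a fresh chat with the result as a tool-role message.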
