From 7adfeacfb84512aad78d777eb57bf9a46721e24b Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 14 Dec 2025 11:53:50 +0300 Subject: Chore: gitignore update --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 3cd29c8..c366cf2 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ gf-lt gflt chat_exports/*.json ragimport +.env -- cgit v1.2.3 From b1b007f0584efe2b6a553fb8996df6a997d56298 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 14 Dec 2025 12:31:17 +0300 Subject: Fix: dynamic model update based on api --- props_table.go | 45 ++++++++++++++++++++++++++++++++++++--------- 1 file changed, 36 insertions(+), 9 deletions(-) diff --git a/props_table.go b/props_table.go index dd359f4..1861ca3 100644 --- a/props_table.go +++ b/props_table.go @@ -130,21 +130,48 @@ func makePropsTable(props map[string]float32) *tview.Table { addListPopupRow("Set log level", logLevels, GetLogLevel(), func(option string) { setLogLevel(option) }) + // Helper function to get model list for a given API + getModelListForAPI := func(api string) []string { + var list []string + if strings.Contains(api, "api.deepseek.com/") { + list = []string{chatBody.Model, "deepseek-chat", "deepseek-reasoner"} + } else if strings.Contains(api, "openrouter.ai") { + list = ORFreeModels + } else { + list = LocalModels + } + // Ensure current chatBody.Model is in the list + if len(list) > 0 && !slices.Contains(list, chatBody.Model) { + list = slices.Insert(list, 0, chatBody.Model) + } + return list + } + + var modelRowIndex int // will be set before model row is added + // Prepare API links dropdown - insert current API at the beginning apiLinks := slices.Insert(cfg.ApiLinks, 0, cfg.CurrentAPI) addListPopupRow("Select an api", apiLinks, cfg.CurrentAPI, func(option string) { cfg.CurrentAPI = option + // Update model list based on new API + newModelList := getModelListForAPI(cfg.CurrentAPI) + modelCellID := fmt.Sprintf("listpopup_%d", modelRowIndex) + if data := cellData[modelCellID]; data != nil { + data.Options = newModelList + } + // Ensure chatBody.Model is in the new list; if not, set to first available model + if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.Model) { + chatBody.Model = newModelList[0] + // Update the displayed cell text + if cell := table.GetCell(modelRowIndex, 1); cell != nil { + cell.SetText(chatBody.Model) + } + } }) - var modelList []string - // INFO: modelList is chosen based on current api link - if strings.Contains(cfg.CurrentAPI, "api.deepseek.com/") { - modelList = []string{chatBody.Model, "deepseek-chat", "deepseek-reasoner"} - } else if strings.Contains(cfg.CurrentAPI, "opentouter.ai") { - modelList = ORFreeModels - } else { // would match on localhost but what if llama.cpp served non localy? 
- modelList = LocalModels - } + // Prepare model list dropdown + modelRowIndex = row + modelList := getModelListForAPI(cfg.CurrentAPI) addListPopupRow("Select a model", modelList, chatBody.Model, func(option string) { chatBody.Model = option }) -- cgit v1.2.3 From 090eb90ee7006715adeaf77ed78fb265b7310812 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 14 Dec 2025 13:02:15 +0300 Subject: Enha: notify user if model list is empty --- props_table.go | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/props_table.go b/props_table.go index 1861ca3..8cb956f 100644 --- a/props_table.go +++ b/props_table.go @@ -132,19 +132,12 @@ func makePropsTable(props map[string]float32) *tview.Table { }) // Helper function to get model list for a given API getModelListForAPI := func(api string) []string { - var list []string if strings.Contains(api, "api.deepseek.com/") { - list = []string{chatBody.Model, "deepseek-chat", "deepseek-reasoner"} + return []string{"deepseek-chat", "deepseek-reasoner"} } else if strings.Contains(api, "openrouter.ai") { - list = ORFreeModels - } else { - list = LocalModels + return ORFreeModels } - // Ensure current chatBody.Model is in the list - if len(list) > 0 && !slices.Contains(list, chatBody.Model) { - list = slices.Insert(list, 0, chatBody.Model) - } - return list + return LocalModels } var modelRowIndex int // will be set before model row is added @@ -256,6 +249,20 @@ func makePropsTable(props map[string]float32) *tview.Table { if cellData[listPopupCellID] != nil && cellData[listPopupCellID].Type == CellTypeListPopup { data := cellData[listPopupCellID] if onChange, ok := data.OnChange.(func(string)); ok && data.Options != nil { + // Check for empty options list + if len(data.Options) == 0 { + // Get label for context + labelCell := table.GetCell(selectedRow, 0) + label := "item" + if labelCell != nil { + label = labelCell.Text + } + logger.Warn("empty options list for", "label", label) + if err := notifyUser("Empty list", "No options available for " + label); err != nil { + logger.Error("failed to send notification", "error", err) + } + return + } // Create a list primitive apiList := tview.NewList().ShowSecondaryText(false). 
SetSelectedBackgroundColor(tcell.ColorGray) -- cgit v1.2.3 From 37b88e7879461d4bc668ff54fb19ef9870b2ba5c Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 14 Dec 2025 13:23:59 +0300 Subject: Fix: empty model list --- props_table.go | 67 +++++++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 52 insertions(+), 15 deletions(-) diff --git a/props_table.go b/props_table.go index 8cb956f..9408469 100644 --- a/props_table.go +++ b/props_table.go @@ -50,6 +50,7 @@ func makePropsTable(props map[string]float32) *tview.Table { row++ // Store cell data for later use in selection functions cellData := make(map[string]*CellData) + var modelCellID string // will be set for the model selection row // Helper function to add a checkbox-like row addCheckboxRow := func(label string, initialValue bool, onChange func(bool)) { table.SetCell(row, 0, @@ -148,22 +149,30 @@ func makePropsTable(props map[string]float32) *tview.Table { cfg.CurrentAPI = option // Update model list based on new API newModelList := getModelListForAPI(cfg.CurrentAPI) - modelCellID := fmt.Sprintf("listpopup_%d", modelRowIndex) - if data := cellData[modelCellID]; data != nil { - data.Options = newModelList + if modelCellID != "" { + if data := cellData[modelCellID]; data != nil { + data.Options = newModelList + } } // Ensure chatBody.Model is in the new list; if not, set to first available model if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.Model) { chatBody.Model = newModelList[0] - // Update the displayed cell text - if cell := table.GetCell(modelRowIndex, 1); cell != nil { - cell.SetText(chatBody.Model) + // Update the displayed cell text - need to find model row + // Search for model row by label + for r := 0; r < table.GetRowCount(); r++ { + if cell := table.GetCell(r, 0); cell != nil && cell.Text == "Select a model" { + if valueCell := table.GetCell(r, 1); valueCell != nil { + valueCell.SetText(chatBody.Model) + } + break + } } } }) // Prepare model list dropdown modelRowIndex = row + modelCellID = fmt.Sprintf("listpopup_%d", modelRowIndex) modelList := getModelListForAPI(cfg.CurrentAPI) addListPopupRow("Select a model", modelList, chatBody.Model, func(option string) { chatBody.Model = option @@ -248,17 +257,45 @@ func makePropsTable(props map[string]float32) *tview.Table { listPopupCellID := fmt.Sprintf("listpopup_%d", selectedRow) if cellData[listPopupCellID] != nil && cellData[listPopupCellID].Type == CellTypeListPopup { data := cellData[listPopupCellID] - if onChange, ok := data.OnChange.(func(string)); ok && data.Options != nil { + if onChange, ok := data.OnChange.(func(string)); ok { + // Get label for context + labelCell := table.GetCell(selectedRow, 0) + label := "item" + if labelCell != nil { + label = labelCell.Text + } + + // For model selection, always compute fresh options from current API + if label == "Select a model" { + freshOptions := getModelListForAPI(cfg.CurrentAPI) + data.Options = freshOptions + // Also update the cell data map + cellData[listPopupCellID].Options = freshOptions + } + + // Handle nil options + if data.Options == nil { + logger.Error("options list is nil for", "label", label) + if err := notifyUser("Configuration error", "Options list is nil for " + label); err != nil { + logger.Error("failed to send notification", "error", err) + } + return + } + // Check for empty options list if len(data.Options) == 0 { - // Get label for context - labelCell := table.GetCell(selectedRow, 0) - label := "item" - if labelCell != nil { - label = labelCell.Text + 
logger.Warn("empty options list for", "label", label, "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels)) + message := "No options available for " + label + if label == "Select a model" { + if strings.Contains(cfg.CurrentAPI, "openrouter.ai") { + message = "No OpenRouter models available. Check token and connection." + } else if strings.Contains(cfg.CurrentAPI, "api.deepseek.com") { + message = "DeepSeek models should be available. Please report bug." + } else { + message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models." + } } - logger.Warn("empty options list for", "label", label) - if err := notifyUser("Empty list", "No options available for " + label); err != nil { + if err := notifyUser("Empty list", message); err != nil { logger.Error("failed to send notification", "error", err) } return @@ -266,7 +303,7 @@ func makePropsTable(props map[string]float32) *tview.Table { // Create a list primitive apiList := tview.NewList().ShowSecondaryText(false). SetSelectedBackgroundColor(tcell.ColorGray) - apiList.SetTitle("Select an API").SetBorder(true) + apiList.SetTitle("Select " + label).SetBorder(true) for i, api := range data.Options { if api == cell.Text { apiList.SetCurrentItem(i) -- cgit v1.2.3 From 82ffc364d34f4906ef4c4c1bd4bd202d393a46bc Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 14 Dec 2025 13:34:26 +0300 Subject: Chore: remove unused ticker --- bot.go | 11 +---------- props_table.go | 5 +---- 2 files changed, 2 insertions(+), 14 deletions(-) diff --git a/bot.go b/bot.go index 3242b88..e2f03b8 100644 --- a/bot.go +++ b/bot.go @@ -841,15 +841,6 @@ func updateModelLists() { } } -func updateModelListsTicker() { - updateModelLists() // run on the start - ticker := time.NewTicker(time.Minute * 1) - for { - <-ticker.C - updateModelLists() - } -} - func init() { var err error cfg, err = config.LoadConfig("config.toml") @@ -910,5 +901,5 @@ func init() { if cfg.STT_ENABLED { asr = extra.NewSTT(logger, cfg) } - go updateModelListsTicker() + go updateModelLists() } diff --git a/props_table.go b/props_table.go index 9408469..dfbace8 100644 --- a/props_table.go +++ b/props_table.go @@ -140,9 +140,7 @@ func makePropsTable(props map[string]float32) *tview.Table { } return LocalModels } - var modelRowIndex int // will be set before model row is added - // Prepare API links dropdown - insert current API at the beginning apiLinks := slices.Insert(cfg.ApiLinks, 0, cfg.CurrentAPI) addListPopupRow("Select an api", apiLinks, cfg.CurrentAPI, func(option string) { @@ -169,7 +167,6 @@ func makePropsTable(props map[string]float32) *tview.Table { } } }) - // Prepare model list dropdown modelRowIndex = row modelCellID = fmt.Sprintf("listpopup_%d", modelRowIndex) @@ -276,7 +273,7 @@ func makePropsTable(props map[string]float32) *tview.Table { // Handle nil options if data.Options == nil { logger.Error("options list is nil for", "label", label) - if err := notifyUser("Configuration error", "Options list is nil for " + label); err != nil { + if err := notifyUser("Configuration error", "Options list is nil for "+label); err != nil { logger.Error("failed to send notification", "error", err) } return -- cgit v1.2.3 From edbacb813bd148db33d8747ada293ef2acabe7e9 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 14 Dec 2025 14:43:00 +0300 Subject: Enha: toggle visibility of status line --- helpfuncs.go | 2 +- tui.go | 57 +++++++++++++++++++++++++++++++++++++++++---------------- 2 files changed, 42 insertions(+), 17 deletions(-) diff 
--git a/helpfuncs.go b/helpfuncs.go index df49ae5..edcb7fe 100644 --- a/helpfuncs.go +++ b/helpfuncs.go @@ -63,7 +63,7 @@ func colorText() { } func updateStatusLine() { - position.SetText(makeStatusLine()) + statusLineWidget.SetText(makeStatusLine()) helpView.SetText(fmt.Sprintf(helpText, makeStatusLine())) } diff --git a/tui.go b/tui.go index 383d680..941371b 100644 --- a/tui.go +++ b/tui.go @@ -23,7 +23,7 @@ var ( textArea *tview.TextArea editArea *tview.TextArea textView *tview.TextView - position *tview.TextView + statusLineWidget *tview.TextView helpView *tview.TextView flex *tview.Flex imgView *tview.Image @@ -32,6 +32,7 @@ var ( renameWindow *tview.InputField roleEditWindow *tview.InputField fullscreenMode bool + positionVisible bool = true // pages historyPage = "historyPage" agentPage = "agentPage" @@ -87,6 +88,7 @@ var ( [yellow]Alt+1[white]: toggle shell mode (execute commands locally) [yellow]Alt+4[white]: edit msg role [yellow]Alt+5[white]: toggle system and tool messages display +[yellow]Alt+6[white]: toggle status line visibility === scrolling chat window (some keys similar to vim) === [yellow]arrows up/down and j/k[white]: scroll up and down @@ -171,6 +173,26 @@ func toggleShellMode() { updateStatusLine() } +func updateFlexLayout() { + if fullscreenMode { + // flex already contains only focused widget; do nothing + return + } + flex.Clear() + flex.AddItem(textView, 0, 40, false) + flex.AddItem(textArea, 0, 10, false) + if positionVisible { + flex.AddItem(statusLineWidget, 0, 2, false) + } + // Keep focus on currently focused widget + focused := app.GetFocus() + if focused == textView { + app.SetFocus(textView) + } else { + app.SetFocus(textArea) + } +} + func executeCommandAndDisplay(cmdText string) { // Parse the command (split by spaces, but handle quoted arguments) cmdParts := parseCommand(cmdText) @@ -456,8 +478,10 @@ func init() { // flex = tview.NewFlex().SetDirection(tview.FlexRow). AddItem(textView, 0, 40, false). - AddItem(textArea, 0, 10, true). // Restore original height - AddItem(position, 0, 2, false) + AddItem(textArea, 0, 10, true) // Restore original height + if positionVisible { + flex.AddItem(statusLineWidget, 0, 2, false) + } // textView.SetBorder(true).SetTitle("chat") textView.SetDoneFunc(func(key tcell.Key) { if key == tcell.KeyEnter { @@ -516,14 +540,16 @@ func init() { }) focusSwitcher[textArea] = textView focusSwitcher[textView] = textArea - position = tview.NewTextView(). + statusLineWidget = tview.NewTextView(). SetDynamicColors(true). SetTextAlign(tview.AlignCenter) // Initially set up flex without search bar flex = tview.NewFlex().SetDirection(tview.FlexRow). AddItem(textView, 0, 40, false). - AddItem(textArea, 0, 10, true). // Restore original height - AddItem(position, 0, 2, false) + AddItem(textArea, 0, 10, true) // Restore original height + if positionVisible { + flex.AddItem(statusLineWidget, 0, 2, false) + } editArea = tview.NewTextArea(). 
SetPlaceholder("Replace msg...") editArea.SetBorder(true).SetTitle("input") @@ -749,6 +775,14 @@ func init() { textView.SetText(chatToText(cfg.ShowSys)) colorText() } + if event.Key() == tcell.KeyRune && event.Rune() == '6' && event.Modifiers()&tcell.ModAlt != 0 { + // toggle status line visibility + if name, _ := pages.GetFrontPage(); name != "main" { + return event + } + positionVisible = !positionVisible + updateFlexLayout() + } if event.Key() == tcell.KeyF1 { // chatList, err := loadHistoryChats() chatList, err := store.GetChatByChar(cfg.AssistantRole) @@ -841,16 +875,7 @@ func init() { } } else { // focused is the fullscreened widget here - flex.Clear(). - AddItem(textView, 0, 40, false). - AddItem(textArea, 0, 10, false). - AddItem(position, 0, 2, false) - - if focused == textView { - app.SetFocus(textView) - } else { // default to textArea - app.SetFocus(textArea) - } + updateFlexLayout() } return nil } -- cgit v1.2.3 From 33f9ed2466e960223389da59a423cc697ff615ba Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 14 Dec 2025 14:44:24 +0300 Subject: Chore: cleanup --- helpfuncs.go | 10 ++++++++++ main.go | 10 ---------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/helpfuncs.go b/helpfuncs.go index edcb7fe..194d68c 100644 --- a/helpfuncs.go +++ b/helpfuncs.go @@ -8,8 +8,18 @@ import ( "os" "path" "strings" + "unicode" ) +func isASCII(s string) bool { + for i := 0; i < len(s); i++ { + if s[i] > unicode.MaxASCII { + return false + } + } + return true +} + func colorText() { text := textView.GetText(false) quoteReplacer := strings.NewReplacer( diff --git a/main.go b/main.go index 63d0976..ec175bf 100644 --- a/main.go +++ b/main.go @@ -3,7 +3,6 @@ package main import ( "flag" "strconv" - "unicode" "github.com/rivo/tview" ) @@ -23,15 +22,6 @@ var ( focusSwitcher = map[tview.Primitive]tview.Primitive{} ) -func isASCII(s string) bool { - for i := 0; i < len(s); i++ { - if s[i] > unicode.MaxASCII { - return false - } - } - return true -} - func main() { apiPort := flag.Int("port", 0, "port to host api") flag.Parse() -- cgit v1.2.3 From 35851647a191f779943530591610a9b22ffaeff9 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Tue, 16 Dec 2025 21:52:03 +0300 Subject: Fix: remove dulicate from apilinks list --- props_table.go | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/props_table.go b/props_table.go index dfbace8..774ea32 100644 --- a/props_table.go +++ b/props_table.go @@ -141,8 +141,14 @@ func makePropsTable(props map[string]float32) *tview.Table { return LocalModels } var modelRowIndex int // will be set before model row is added - // Prepare API links dropdown - insert current API at the beginning - apiLinks := slices.Insert(cfg.ApiLinks, 0, cfg.CurrentAPI) + // Prepare API links dropdown - ensure current API is first, avoid duplicates + apiLinks := make([]string, 0, len(cfg.ApiLinks)+1) + apiLinks = append(apiLinks, cfg.CurrentAPI) + for _, api := range cfg.ApiLinks { + if api != cfg.CurrentAPI { + apiLinks = append(apiLinks, api) + } + } addListPopupRow("Select an api", apiLinks, cfg.CurrentAPI, func(option string) { cfg.CurrentAPI = option // Update model list based on new API -- cgit v1.2.3 From d73c3abd6bda8690e8b5e57342221c8cb2cc88b3 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Wed, 17 Dec 2025 13:03:40 +0300 Subject: Feat: preload lcp model --- bot.go | 55 ++++++++++++++++++++++++++++++++++++++++++++++++++++++- tui.go | 9 +++++++++ 2 files changed, 63 insertions(+), 1 deletion(-) diff --git a/bot.go b/bot.go index 
e2f03b8..8a0ba0a 100644 --- a/bot.go +++ b/bot.go @@ -16,6 +16,7 @@ import ( "log/slog" "net" "net/http" + "net/url" "os" "path" "strings" @@ -188,6 +189,58 @@ func createClient(connectTimeout time.Duration) *http.Client { } } +func warmUpModel() { + u, err := url.Parse(cfg.CurrentAPI) + if err != nil { + return + } + host := u.Hostname() + if host != "localhost" && host != "127.0.0.1" && host != "::1" { + return + } + go func() { + var data []byte + var err error + if strings.HasSuffix(cfg.CurrentAPI, "/completion") { + // Old completion endpoint + req := models.NewLCPReq(".", chatBody.Model, nil, map[string]float32{ + "temperature": 0.8, + "dry_multiplier": 0.0, + "min_p": 0.05, + "n_predict": 0, + }, []string{}) + req.Stream = false + data, err = json.Marshal(req) + } else if strings.Contains(cfg.CurrentAPI, "/v1/chat/completions") { + // OpenAI-compatible chat endpoint + req := models.OpenAIReq{ + ChatBody: &models.ChatBody{ + Model: chatBody.Model, + Messages: []models.RoleMsg{ + {Role: "system", Content: "."}, + }, + Stream: false, + }, + Tools: nil, + } + data, err = json.Marshal(req) + } else { + // Unknown local endpoint, skip + return + } + if err != nil { + logger.Debug("failed to marshal warmup request", "error", err) + return + } + resp, err := httpClient.Post(cfg.CurrentAPI, "application/json", bytes.NewReader(data)) + if err != nil { + logger.Debug("warmup request failed", "error", err) + return + } + resp.Body.Close() + }() +} + func fetchLCPModelName() *models.LCPModels { //nolint resp, err := httpClient.Get(cfg.FetchModelNameAPI) @@ -894,7 +947,7 @@ func init() { cluedoState = extra.CluedoPrepCards(playerOrder) } choseChunkParser() - httpClient = createClient(time.Second * 15) + httpClient = createClient(time.Second * 90) if cfg.TTS_ENABLED { orator = extra.NewOrator(logger, cfg) } diff --git a/tui.go b/tui.go index 941371b..53c8cfd 100644 --- a/tui.go +++ b/tui.go @@ -89,6 +89,7 @@ var ( [yellow]Alt+4[white]: edit msg role [yellow]Alt+5[white]: toggle system and tool messages display [yellow]Alt+6[white]: toggle status line visibility +[yellow]Alt+9[white]: warm up (load) selected llama.cpp model === scrolling chat window (some keys similar to vim) === [yellow]arrows up/down and j/k[white]: scroll up and down @@ -1235,6 +1236,14 @@ func init() { toggleShellMode() return nil } + if event.Key() == tcell.KeyRune && event.Modifiers() == tcell.ModAlt && event.Rune() == '9' { + // Warm up (load) the currently selected model + go warmUpModel() + if err := notifyUser("model warmup", "loading model: "+chatBody.Model); err != nil { + logger.Debug("failed to notify user", "error", err) + } + return nil + } // cannot send msg in editMode or botRespMode if event.Key() == tcell.KeyEscape && !editMode && !botRespMode { msgText := textArea.GetText() -- cgit v1.2.3 From a06cfd995f05782854844e51a71a656f70274f64 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Thu, 18 Dec 2025 11:53:07 +0300 Subject: Feat: add agent entity --- agent/agent.go | 60 ++++++++++++++++++++++++++++ agent/format.go | 120 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ bot.go | 4 +- tools.go | 24 ++++++++++++ 4 files changed, 206 insertions(+), 2 deletions(-) create mode 100644 agent/agent.go create mode 100644 agent/format.go diff --git a/agent/agent.go b/agent/agent.go new file mode 100644 index 0000000..30e30e3 --- /dev/null +++ b/agent/agent.go @@ -0,0 +1,60 @@ +package agent + +// Agent defines an interface for processing tool outputs. 
+// An Agent can clean, summarize, or otherwise transform raw tool outputs +// before they are presented to the main LLM. +type Agent interface { + // Process takes the original tool arguments and the raw output from the tool, + // and returns a cleaned/summarized version suitable for the main LLM context. + Process(args map[string]string, rawOutput []byte) []byte +} + +// registry holds mapping from tool names to agents. +var registry = make(map[string]Agent) + +// Register adds an agent for a specific tool name. +// If an agent already exists for the tool, it will be replaced. +func Register(toolName string, a Agent) { + registry[toolName] = a +} + +// Get returns the agent for a tool name, or nil if none is registered. +func Get(toolName string) Agent { + return registry[toolName] +} + +// FormatterAgent is a simple agent that applies formatting functions. +type FormatterAgent struct { + formatFunc func([]byte) (string, error) +} + +// NewFormatterAgent creates a FormatterAgent that uses the given formatting function. +func NewFormatterAgent(formatFunc func([]byte) (string, error)) *FormatterAgent { + return &FormatterAgent{formatFunc: formatFunc} +} + +// Process applies the formatting function to raw output. +func (a *FormatterAgent) Process(args map[string]string, rawOutput []byte) []byte { + if a.formatFunc == nil { + return rawOutput + } + formatted, err := a.formatFunc(rawOutput) + if err != nil { + // On error, return raw output with a warning prefix + return []byte("[formatting failed, showing raw output]\n" + string(rawOutput)) + } + return []byte(formatted) +} + +// DefaultFormatter returns a FormatterAgent that uses the appropriate formatting +// based on tool name. +func DefaultFormatter(toolName string) Agent { + switch toolName { + case "websearch": + return NewFormatterAgent(FormatSearchResults) + case "read_url": + return NewFormatterAgent(FormatWebPageContent) + default: + return nil + } +} \ No newline at end of file diff --git a/agent/format.go b/agent/format.go new file mode 100644 index 0000000..01ecb07 --- /dev/null +++ b/agent/format.go @@ -0,0 +1,120 @@ +package agent + +import ( + "encoding/json" + "fmt" + "strings" +) + +// FormatSearchResults takes raw JSON from websearch and returns a concise summary. 
+func FormatSearchResults(rawJSON []byte) (string, error) { + // Try to unmarshal as generic slice of maps + var results []map[string]interface{} + if err := json.Unmarshal(rawJSON, &results); err != nil { + // If that fails, try as a single map (maybe wrapper object) + var wrapper map[string]interface{} + if err2 := json.Unmarshal(rawJSON, &wrapper); err2 == nil { + // Look for a "results" or "data" field + if data, ok := wrapper["results"].([]interface{}); ok { + // Convert to slice of maps + for _, item := range data { + if m, ok := item.(map[string]interface{}); ok { + results = append(results, m) + } + } + } else if data, ok := wrapper["data"].([]interface{}); ok { + for _, item := range data { + if m, ok := item.(map[string]interface{}); ok { + results = append(results, m) + } + } + } else { + // No slice found, treat wrapper as single result + results = []map[string]interface{}{wrapper} + } + } else { + return "", fmt.Errorf("failed to unmarshal search results: %v (also %v)", err, err2) + } + } + + if len(results) == 0 { + return "No search results found.", nil + } + + var sb strings.Builder + sb.WriteString(fmt.Sprintf("Found %d results:\n", len(results))) + for i, r := range results { + // Extract common fields + title := getString(r, "title", "Title", "name", "heading") + snippet := getString(r, "snippet", "description", "content", "body", "text", "summary") + url := getString(r, "url", "link", "uri", "source") + + sb.WriteString(fmt.Sprintf("%d. ", i+1)) + if title != "" { + sb.WriteString(fmt.Sprintf("**%s**", title)) + } else { + sb.WriteString("(No title)") + } + if snippet != "" { + // Truncate snippet to reasonable length + if len(snippet) > 200 { + snippet = snippet[:200] + "..." + } + sb.WriteString(fmt.Sprintf(" — %s", snippet)) + } + if url != "" { + sb.WriteString(fmt.Sprintf(" (%s)", url)) + } + sb.WriteString("\n") + } + return sb.String(), nil +} + +// FormatWebPageContent takes raw JSON from read_url and returns a concise summary. +func FormatWebPageContent(rawJSON []byte) (string, error) { + // Try to unmarshal as generic map + var data map[string]interface{} + if err := json.Unmarshal(rawJSON, &data); err != nil { + // If that fails, try as string directly + var content string + if err2 := json.Unmarshal(rawJSON, &content); err2 == nil { + return truncateText(content, 500), nil + } + // Both failed, return first error + return "", fmt.Errorf("failed to unmarshal web page content: %v", err) + } + + // Look for common content fields + content := getString(data, "content", "text", "body", "article", "html", "markdown", "data") + if content == "" { + // If no content field, marshal the whole thing as a short string + summary := fmt.Sprintf("%v", data) + return truncateText(summary, 300), nil + } + return truncateText(content, 500), nil +} + +// Helper to get a string value from a map, trying multiple keys. +func getString(m map[string]interface{}, keys ...string) string { + for _, k := range keys { + if val, ok := m[k]; ok { + switch v := val.(type) { + case string: + return v + case fmt.Stringer: + return v.String() + default: + return fmt.Sprintf("%v", v) + } + } + } + return "" +} + +// Helper to truncate text and add ellipsis. +func truncateText(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen] + "..." 
+} \ No newline at end of file diff --git a/bot.go b/bot.go index 8a0ba0a..ddc6d2a 100644 --- a/bot.go +++ b/bot.go @@ -756,7 +756,7 @@ func findCall(msg, toolCall string, tv *tview.TextView) { } } // call a func - f, ok := fnMap[fc.Name] + _, ok := fnMap[fc.Name] if !ok { m := fc.Name + " is not implemented" // Create tool response message with the proper tool_call_id @@ -775,7 +775,7 @@ func findCall(msg, toolCall string, tv *tview.TextView) { chatRound("", cfg.AssistantRole, tv, false, false) return } - resp := f(fc.Args) + resp := callToolWithAgent(fc.Name, fc.Args) toolMsg := string(resp) // Remove the "tool response: " prefix and %+v formatting logger.Info("llm used tool call", "tool_resp", toolMsg, "tool_attrs", fc) fmt.Fprintf(tv, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n", diff --git a/tools.go b/tools.go index d603507..e4af7ad 100644 --- a/tools.go +++ b/tools.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "gf-lt/agent" "gf-lt/extra" "gf-lt/models" "io" @@ -848,6 +849,29 @@ var fnMap = map[string]fnSig{ "todo_delete": todoDelete, } +// callToolWithAgent calls the tool and applies any registered agent. +func callToolWithAgent(name string, args map[string]string) []byte { + f, ok := fnMap[name] + if !ok { + return []byte(fmt.Sprintf("tool %s not found", name)) + } + raw := f(args) + if a := agent.Get(name); a != nil { + return a.Process(args, raw) + } + return raw +} + +// registerDefaultAgents registers default agents for formatting. +func registerDefaultAgents() { + agent.Register("websearch", agent.DefaultFormatter("websearch")) + agent.Register("read_url", agent.DefaultFormatter("read_url")) +} + +func init() { + registerDefaultAgents() +} + // openai style def var baseTools = []models.Tool{ // websearch -- cgit v1.2.3 From 8cdec5e54455c3dfb74c2e8016f17f806f86fa54 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Thu, 18 Dec 2025 14:39:06 +0300 Subject: Feat: http request for agent --- agent/request.go | 42 ++++++++++++++++++++++++++++++++++++++++++ bot.go | 4 +--- 2 files changed, 43 insertions(+), 3 deletions(-) create mode 100644 agent/request.go diff --git a/agent/request.go b/agent/request.go new file mode 100644 index 0000000..3b8d083 --- /dev/null +++ b/agent/request.go @@ -0,0 +1,42 @@ +package agent + +import ( + "gf-lt/config" + "io" + "log/slog" + "net/http" +) + +var httpClient = &http.Client{} + +type AgentClient struct { + cfg *config.Config + getToken func() string + log slog.Logger +} + +func NewAgentClient(cfg *config.Config, log slog.Logger, gt func() string) *AgentClient { + return &AgentClient{ + cfg: cfg, + getToken: gt, + log: log, + } +} + +func (ag *AgentClient) LLMRequest(body io.Reader) ([]byte, error) { + req, err := http.NewRequest("POST", ag.cfg.CurrentAPI, body) + if err != nil { + return nil, err + } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Authorization", "Bearer "+ag.getToken()) + req.Header.Set("Accept-Encoding", "gzip") + resp, err := httpClient.Do(req) + if err != nil { + ag.log.Error("llamacpp api", "error", err) + return nil, err + } + defer resp.Body.Close() + return io.ReadAll(resp.Body) +} diff --git a/bot.go b/bot.go index ddc6d2a..d6418ff 100644 --- a/bot.go +++ b/bot.go @@ -327,12 +327,11 @@ func fetchLCPModels() ([]string, error) { return localModels, nil } +// sendMsgToLLM expects streaming resp func sendMsgToLLM(body io.Reader) { choseChunkParser() - var req *http.Request var err error - // Capture and log the request body for debugging if _, ok := 
body.(*io.LimitedReader); ok { // If it's a LimitedReader, we need to handle it differently @@ -379,7 +378,6 @@ func sendMsgToLLM(body io.Reader) { return } } - req.Header.Add("Accept", "application/json") req.Header.Add("Content-Type", "application/json") req.Header.Add("Authorization", "Bearer "+chunkParser.GetToken()) -- cgit v1.2.3 From 5f852418d8d12868df83a9591b15e0846971fff9 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Thu, 18 Dec 2025 15:22:03 +0300 Subject: Chore: request cleanup --- bot.go | 62 +++++++++++--------------------------------------------------- 1 file changed, 11 insertions(+), 51 deletions(-) diff --git a/bot.go b/bot.go index d6418ff..f2683bb 100644 --- a/bot.go +++ b/bot.go @@ -330,59 +330,19 @@ func fetchLCPModels() ([]string, error) { // sendMsgToLLM expects streaming resp func sendMsgToLLM(body io.Reader) { choseChunkParser() - var req *http.Request - var err error - // Capture and log the request body for debugging - if _, ok := body.(*io.LimitedReader); ok { - // If it's a LimitedReader, we need to handle it differently - logger.Debug("request body type is LimitedReader", "parser", chunkParser, "link", cfg.CurrentAPI) - req, err = http.NewRequest("POST", cfg.CurrentAPI, body) - if err != nil { - logger.Error("newreq error", "error", err) - if err := notifyUser("error", "apicall failed:"+err.Error()); err != nil { - logger.Error("failed to notify", "error", err) - } - streamDone <- true - return - } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - req.Header.Add("Authorization", "Bearer "+chunkParser.GetToken()) - req.Header.Set("Accept-Encoding", "gzip") - } else { - // For other reader types, capture and log the body content - bodyBytes, err := io.ReadAll(body) - if err != nil { - logger.Error("failed to read request body for logging", "error", err) - // Create request with original body if reading fails - req, err = http.NewRequest("POST", cfg.CurrentAPI, bytes.NewReader(bodyBytes)) - if err != nil { - logger.Error("newreq error", "error", err) - if err := notifyUser("error", "apicall failed:"+err.Error()); err != nil { - logger.Error("failed to notify", "error", err) - } - streamDone <- true - return - } - } else { - // Log the request body for debugging - logger.Debug("sending request to API", "api", cfg.CurrentAPI, "body", string(bodyBytes)) - // Create request with the captured body - req, err = http.NewRequest("POST", cfg.CurrentAPI, bytes.NewReader(bodyBytes)) - if err != nil { - logger.Error("newreq error", "error", err) - if err := notifyUser("error", "apicall failed:"+err.Error()); err != nil { - logger.Error("failed to notify", "error", err) - } - streamDone <- true - return - } + req, err := http.NewRequest("POST", cfg.CurrentAPI, body) + if err != nil { + logger.Error("newreq error", "error", err) + if err := notifyUser("error", "apicall failed:"+err.Error()); err != nil { + logger.Error("failed to notify", "error", err) } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - req.Header.Add("Authorization", "Bearer "+chunkParser.GetToken()) - req.Header.Set("Accept-Encoding", "gzip") + streamDone <- true + return } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Authorization", "Bearer "+chunkParser.GetToken()) + req.Header.Set("Accept-Encoding", "gzip") // nolint resp, err := httpClient.Do(req) if err != nil { -- cgit v1.2.3 From 67ea1aef0dafb9dc6f82e009cc1ecc613f71e520 Mon 
Sep 17 00:00:00 2001 From: Grail Finder Date: Fri, 19 Dec 2025 11:06:22 +0300 Subject: Feat: two agent types; WebAgentB impl --- agent/agent.go | 71 +++++++++++--------------------- agent/format.go | 120 ------------------------------------------------------ agent/request.go | 22 ++++++++++ agent/webagent.go | 34 ++++++++++++++++ bot.go | 1 + config/config.go | 2 +- props_table.go | 2 + tui.go | 30 +++++++------- 8 files changed, 99 insertions(+), 183 deletions(-) delete mode 100644 agent/format.go create mode 100644 agent/webagent.go diff --git a/agent/agent.go b/agent/agent.go index 30e30e3..5ad1ef1 100644 --- a/agent/agent.go +++ b/agent/agent.go @@ -1,60 +1,35 @@ package agent -// Agent defines an interface for processing tool outputs. -// An Agent can clean, summarize, or otherwise transform raw tool outputs -// before they are presented to the main LLM. -type Agent interface { - // Process takes the original tool arguments and the raw output from the tool, - // and returns a cleaned/summarized version suitable for the main LLM context. - Process(args map[string]string, rawOutput []byte) []byte -} +// I see two types of agents possible: +// ones who do their own tools calls +// ones that works only with the output -// registry holds mapping from tool names to agents. -var registry = make(map[string]Agent) +// A: main chat -> agent (handles everything: tool + processing) +// B: main chat -> tool -> agent (process tool output) -// Register adds an agent for a specific tool name. -// If an agent already exists for the tool, it will be replaced. -func Register(toolName string, a Agent) { - registry[toolName] = a +// AgenterA gets a task "find out weather in london" +// proceeds to make tool calls on its own +type AgenterA interface { + ProcessTask(task string) []byte } -// Get returns the agent for a tool name, or nil if none is registered. -func Get(toolName string) Agent { - return registry[toolName] +// AgenterB defines an interface for processing tool outputs +type AgenterB interface { + // Process takes the original tool arguments and the raw output from the tool, + // and returns a cleaned/summarized version suitable for the main LLM context + Process(args map[string]string, rawOutput []byte) []byte } -// FormatterAgent is a simple agent that applies formatting functions. -type FormatterAgent struct { - formatFunc func([]byte) (string, error) -} +// registry holds mapping from tool names to agents +var RegistryB = make(map[string]AgenterB) +var RegistryA = make(map[AgenterA][]string) -// NewFormatterAgent creates a FormatterAgent that uses the given formatting function. -func NewFormatterAgent(formatFunc func([]byte) (string, error)) *FormatterAgent { - return &FormatterAgent{formatFunc: formatFunc} +// Register adds an agent for a specific tool name +// If an agent already exists for the tool, it will be replaced +func RegisterB(toolName string, a AgenterB) { + RegistryB[toolName] = a } -// Process applies the formatting function to raw output. 
-func (a *FormatterAgent) Process(args map[string]string, rawOutput []byte) []byte { - if a.formatFunc == nil { - return rawOutput - } - formatted, err := a.formatFunc(rawOutput) - if err != nil { - // On error, return raw output with a warning prefix - return []byte("[formatting failed, showing raw output]\n" + string(rawOutput)) - } - return []byte(formatted) +func RegisterA(toolNames []string, a AgenterA) { + RegistryA[a] = toolNames } - -// DefaultFormatter returns a FormatterAgent that uses the appropriate formatting -// based on tool name. -func DefaultFormatter(toolName string) Agent { - switch toolName { - case "websearch": - return NewFormatterAgent(FormatSearchResults) - case "read_url": - return NewFormatterAgent(FormatWebPageContent) - default: - return nil - } -} \ No newline at end of file diff --git a/agent/format.go b/agent/format.go deleted file mode 100644 index 01ecb07..0000000 --- a/agent/format.go +++ /dev/null @@ -1,120 +0,0 @@ -package agent - -import ( - "encoding/json" - "fmt" - "strings" -) - -// FormatSearchResults takes raw JSON from websearch and returns a concise summary. -func FormatSearchResults(rawJSON []byte) (string, error) { - // Try to unmarshal as generic slice of maps - var results []map[string]interface{} - if err := json.Unmarshal(rawJSON, &results); err != nil { - // If that fails, try as a single map (maybe wrapper object) - var wrapper map[string]interface{} - if err2 := json.Unmarshal(rawJSON, &wrapper); err2 == nil { - // Look for a "results" or "data" field - if data, ok := wrapper["results"].([]interface{}); ok { - // Convert to slice of maps - for _, item := range data { - if m, ok := item.(map[string]interface{}); ok { - results = append(results, m) - } - } - } else if data, ok := wrapper["data"].([]interface{}); ok { - for _, item := range data { - if m, ok := item.(map[string]interface{}); ok { - results = append(results, m) - } - } - } else { - // No slice found, treat wrapper as single result - results = []map[string]interface{}{wrapper} - } - } else { - return "", fmt.Errorf("failed to unmarshal search results: %v (also %v)", err, err2) - } - } - - if len(results) == 0 { - return "No search results found.", nil - } - - var sb strings.Builder - sb.WriteString(fmt.Sprintf("Found %d results:\n", len(results))) - for i, r := range results { - // Extract common fields - title := getString(r, "title", "Title", "name", "heading") - snippet := getString(r, "snippet", "description", "content", "body", "text", "summary") - url := getString(r, "url", "link", "uri", "source") - - sb.WriteString(fmt.Sprintf("%d. ", i+1)) - if title != "" { - sb.WriteString(fmt.Sprintf("**%s**", title)) - } else { - sb.WriteString("(No title)") - } - if snippet != "" { - // Truncate snippet to reasonable length - if len(snippet) > 200 { - snippet = snippet[:200] + "..." - } - sb.WriteString(fmt.Sprintf(" — %s", snippet)) - } - if url != "" { - sb.WriteString(fmt.Sprintf(" (%s)", url)) - } - sb.WriteString("\n") - } - return sb.String(), nil -} - -// FormatWebPageContent takes raw JSON from read_url and returns a concise summary. 
-func FormatWebPageContent(rawJSON []byte) (string, error) { - // Try to unmarshal as generic map - var data map[string]interface{} - if err := json.Unmarshal(rawJSON, &data); err != nil { - // If that fails, try as string directly - var content string - if err2 := json.Unmarshal(rawJSON, &content); err2 == nil { - return truncateText(content, 500), nil - } - // Both failed, return first error - return "", fmt.Errorf("failed to unmarshal web page content: %v", err) - } - - // Look for common content fields - content := getString(data, "content", "text", "body", "article", "html", "markdown", "data") - if content == "" { - // If no content field, marshal the whole thing as a short string - summary := fmt.Sprintf("%v", data) - return truncateText(summary, 300), nil - } - return truncateText(content, 500), nil -} - -// Helper to get a string value from a map, trying multiple keys. -func getString(m map[string]interface{}, keys ...string) string { - for _, k := range keys { - if val, ok := m[k]; ok { - switch v := val.(type) { - case string: - return v - case fmt.Stringer: - return v.String() - default: - return fmt.Sprintf("%v", v) - } - } - } - return "" -} - -// Helper to truncate text and add ellipsis. -func truncateText(s string, maxLen int) string { - if len(s) <= maxLen { - return s - } - return s[:maxLen] + "..." -} \ No newline at end of file diff --git a/agent/request.go b/agent/request.go index 3b8d083..e10f03f 100644 --- a/agent/request.go +++ b/agent/request.go @@ -1,7 +1,10 @@ package agent import ( + "bytes" + "encoding/json" "gf-lt/config" + "gf-lt/models" "io" "log/slog" "net/http" @@ -23,9 +26,28 @@ func NewAgentClient(cfg *config.Config, log slog.Logger, gt func() string) *Agen } } +func (ag *AgentClient) FormMsg(sysprompt, msg string) (io.Reader, error) { + agentConvo := []models.RoleMsg{ + {Role: "system", Content: sysprompt}, + {Role: "user", Content: msg}, + } + agentChat := &models.ChatBody{ + Model: ag.cfg.CurrentModel, + Stream: true, + Messages: agentConvo, + } + b, err := json.Marshal(agentChat) + if err != nil { + ag.log.Error("failed to form agent msg", "error", err) + return nil, err + } + return bytes.NewReader(b), nil +} + func (ag *AgentClient) LLMRequest(body io.Reader) ([]byte, error) { req, err := http.NewRequest("POST", ag.cfg.CurrentAPI, body) if err != nil { + ag.log.Error("llamacpp api", "error", err) return nil, err } req.Header.Add("Accept", "application/json") diff --git a/agent/webagent.go b/agent/webagent.go new file mode 100644 index 0000000..0087e8e --- /dev/null +++ b/agent/webagent.go @@ -0,0 +1,34 @@ +package agent + +import ( + "fmt" + "log/slog" +) + +// WebAgentB is a simple agent that applies formatting functions +type WebAgentB struct { + *AgentClient + sysprompt string + log slog.Logger +} + +// NewWebAgentB creates a WebAgentB that uses the given formatting function +func NewWebAgentB(sysprompt string) *WebAgentB { + return &WebAgentB{sysprompt: sysprompt} +} + +// Process applies the formatting function to raw output +func (a *WebAgentB) Process(args map[string]string, rawOutput []byte) []byte { + msg, err := a.FormMsg(a.sysprompt, + fmt.Sprintf("request:\n%+v\ntool response:\n%v", args, string(rawOutput))) + if err != nil { + a.log.Error("failed to process the request", "error", err) + return []byte("failed to process the request; err: " + err.Error()) + } + resp, err := a.LLMRequest(msg) + if err != nil { + a.log.Error("failed to process the request", "error", err) + return []byte("failed to process the request; err: " + err.Error()) 
+ } + return resp +} diff --git a/bot.go b/bot.go index f2683bb..8206c63 100644 --- a/bot.go +++ b/bot.go @@ -263,6 +263,7 @@ func fetchLCPModelName() *models.LCPModels { return nil } chatBody.Model = path.Base(llmModel.Data[0].ID) + cfg.CurrentModel = chatBody.Model return &llmModel } diff --git a/config/config.go b/config/config.go index eef8035..5b7cc35 100644 --- a/config/config.go +++ b/config/config.go @@ -12,7 +12,7 @@ type Config struct { ChatAPI string `toml:"ChatAPI"` CompletionAPI string `toml:"CompletionAPI"` CurrentAPI string - CurrentProvider string + CurrentModel string `toml:"CurrentModel"` APIMap map[string]string FetchModelNameAPI string `toml:"FetchModelNameAPI"` // ToolsAPI list? diff --git a/props_table.go b/props_table.go index 774ea32..ae225d8 100644 --- a/props_table.go +++ b/props_table.go @@ -161,6 +161,7 @@ func makePropsTable(props map[string]float32) *tview.Table { // Ensure chatBody.Model is in the new list; if not, set to first available model if len(newModelList) > 0 && !slices.Contains(newModelList, chatBody.Model) { chatBody.Model = newModelList[0] + cfg.CurrentModel = chatBody.Model // Update the displayed cell text - need to find model row // Search for model row by label for r := 0; r < table.GetRowCount(); r++ { @@ -179,6 +180,7 @@ func makePropsTable(props map[string]float32) *tview.Table { modelList := getModelListForAPI(cfg.CurrentAPI) addListPopupRow("Select a model", modelList, chatBody.Model, func(option string) { chatBody.Model = option + cfg.CurrentModel = chatBody.Model }) // Role selection dropdown addListPopupRow("Write next message as", listRolesWithUser(), cfg.WriteNextMsgAs, func(option string) { diff --git a/tui.go b/tui.go index 53c8cfd..d3ce14e 100644 --- a/tui.go +++ b/tui.go @@ -18,21 +18,21 @@ import ( ) var ( - app *tview.Application - pages *tview.Pages - textArea *tview.TextArea - editArea *tview.TextArea - textView *tview.TextView + app *tview.Application + pages *tview.Pages + textArea *tview.TextArea + editArea *tview.TextArea + textView *tview.TextView statusLineWidget *tview.TextView - helpView *tview.TextView - flex *tview.Flex - imgView *tview.Image - defaultImage = "sysprompts/llama.png" - indexPickWindow *tview.InputField - renameWindow *tview.InputField - roleEditWindow *tview.InputField - fullscreenMode bool - positionVisible bool = true + helpView *tview.TextView + flex *tview.Flex + imgView *tview.Image + defaultImage = "sysprompts/llama.png" + indexPickWindow *tview.InputField + renameWindow *tview.InputField + roleEditWindow *tview.InputField + fullscreenMode bool + positionVisible bool = true // pages historyPage = "historyPage" agentPage = "agentPage" @@ -984,12 +984,14 @@ func init() { if len(ORFreeModels) > 0 { currentORModelIndex = (currentORModelIndex + 1) % len(ORFreeModels) chatBody.Model = ORFreeModels[currentORModelIndex] + cfg.CurrentModel = chatBody.Model } updateStatusLine() } else { if len(LocalModels) > 0 { currentLocalModelIndex = (currentLocalModelIndex + 1) % len(LocalModels) chatBody.Model = LocalModels[currentLocalModelIndex] + cfg.CurrentModel = chatBody.Model } updateStatusLine() // // For non-OpenRouter APIs, use the old logic -- cgit v1.2.3 From a875abcf198dd2f85c518f8bf2c599db66d3e69f Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Fri, 19 Dec 2025 12:46:22 +0300 Subject: Enha: agentclient log --- agent/request.go | 4 ++++ agent/webagent.go | 10 ++++------ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/agent/request.go b/agent/request.go index e10f03f..2d557ac 100644 --- 
a/agent/request.go +++ b/agent/request.go @@ -26,6 +26,10 @@ func NewAgentClient(cfg *config.Config, log slog.Logger, gt func() string) *Agen } } +func (ag *AgentClient) Log() *slog.Logger { + return &ag.log +} + func (ag *AgentClient) FormMsg(sysprompt, msg string) (io.Reader, error) { agentConvo := []models.RoleMsg{ {Role: "system", Content: sysprompt}, diff --git a/agent/webagent.go b/agent/webagent.go index 0087e8e..ff6cd86 100644 --- a/agent/webagent.go +++ b/agent/webagent.go @@ -2,19 +2,17 @@ package agent import ( "fmt" - "log/slog" ) // WebAgentB is a simple agent that applies formatting functions type WebAgentB struct { *AgentClient sysprompt string - log slog.Logger } // NewWebAgentB creates a WebAgentB that uses the given formatting function -func NewWebAgentB(sysprompt string) *WebAgentB { - return &WebAgentB{sysprompt: sysprompt} +func NewWebAgentB(client *AgentClient, sysprompt string) *WebAgentB { + return &WebAgentB{AgentClient: client, sysprompt: sysprompt} } // Process applies the formatting function to raw output @@ -22,12 +20,12 @@ func (a *WebAgentB) Process(args map[string]string, rawOutput []byte) []byte { msg, err := a.FormMsg(a.sysprompt, fmt.Sprintf("request:\n%+v\ntool response:\n%v", args, string(rawOutput))) if err != nil { - a.log.Error("failed to process the request", "error", err) + a.Log().Error("failed to process the request", "error", err) return []byte("failed to process the request; err: " + err.Error()) } resp, err := a.LLMRequest(msg) if err != nil { - a.log.Error("failed to process the request", "error", err) + a.Log().Error("failed to process the request", "error", err) return []byte("failed to process the request; err: " + err.Error()) } return resp -- cgit v1.2.3 From f779f039745f97f08f25967214d07716ce213326 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Fri, 19 Dec 2025 15:39:55 +0300 Subject: Enha: agent request builder --- agent/agent.go | 10 +++ agent/request.go | 192 +++++++++++++++++++++++++++++++++++++++++++++++++++---- bot.go | 72 +++++++++++++++++++-- bot_test.go | 134 ++++++++++++++++++++++++++++++++++++++ tools.go | 52 +++++++++++---- 5 files changed, 430 insertions(+), 30 deletions(-) diff --git a/agent/agent.go b/agent/agent.go index 5ad1ef1..8824ecb 100644 --- a/agent/agent.go +++ b/agent/agent.go @@ -33,3 +33,13 @@ func RegisterB(toolName string, a AgenterB) { func RegisterA(toolNames []string, a AgenterA) { RegistryA[a] = toolNames } + +// Get returns the agent registered for the given tool name, or nil if none. +func Get(toolName string) AgenterB { + return RegistryB[toolName] +} + +// Register is a convenience wrapper for RegisterB. 
+func Register(toolName string, a AgenterB) { + RegisterB(toolName, a) +} diff --git a/agent/request.go b/agent/request.go index 2d557ac..bb4a80d 100644 --- a/agent/request.go +++ b/agent/request.go @@ -3,15 +3,32 @@ package agent import ( "bytes" "encoding/json" + "fmt" "gf-lt/config" "gf-lt/models" "io" "log/slog" "net/http" + "strings" ) var httpClient = &http.Client{} +var defaultProps = map[string]float32{ + "temperature": 0.8, + "dry_multiplier": 0.0, + "min_p": 0.05, + "n_predict": -1.0, +} + +func detectAPI(api string) (isCompletion, isChat, isDeepSeek, isOpenRouter bool) { + isCompletion = strings.Contains(api, "/completion") && !strings.Contains(api, "/chat/completions") + isChat = strings.Contains(api, "/chat/completions") + isDeepSeek = strings.Contains(api, "deepseek.com") + isOpenRouter = strings.Contains(api, "openrouter.ai") + return +} + type AgentClient struct { cfg *config.Config getToken func() string @@ -31,38 +48,185 @@ func (ag *AgentClient) Log() *slog.Logger { } func (ag *AgentClient) FormMsg(sysprompt, msg string) (io.Reader, error) { - agentConvo := []models.RoleMsg{ + b, err := ag.buildRequest(sysprompt, msg) + if err != nil { + return nil, err + } + return bytes.NewReader(b), nil +} + +// buildRequest creates the appropriate LLM request based on the current API endpoint. +func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) { + api := ag.cfg.CurrentAPI + model := ag.cfg.CurrentModel + messages := []models.RoleMsg{ {Role: "system", Content: sysprompt}, {Role: "user", Content: msg}, } - agentChat := &models.ChatBody{ - Model: ag.cfg.CurrentModel, - Stream: true, - Messages: agentConvo, + + // Determine API type + isCompletion, isChat, isDeepSeek, isOpenRouter := detectAPI(api) + ag.log.Debug("agent building request", "api", api, "isCompletion", isCompletion, "isChat", isChat, "isDeepSeek", isDeepSeek, "isOpenRouter", isOpenRouter) + + // Build prompt for completion endpoints + if isCompletion { + var sb strings.Builder + for _, m := range messages { + sb.WriteString(m.ToPrompt()) + sb.WriteString("\n") + } + prompt := strings.TrimSpace(sb.String()) + + if isDeepSeek { + // DeepSeek completion + req := models.NewDSCompletionReq(prompt, model, defaultProps["temperature"], []string{}) + req.Stream = false // Agents don't need streaming + return json.Marshal(req) + } else if isOpenRouter { + // OpenRouter completion + req := models.NewOpenRouterCompletionReq(model, prompt, defaultProps, []string{}) + req.Stream = false // Agents don't need streaming + return json.Marshal(req) + } else { + // Assume llama.cpp completion + req := models.NewLCPReq(prompt, model, nil, defaultProps, []string{}) + req.Stream = false // Agents don't need streaming + return json.Marshal(req) + } } - b, err := json.Marshal(agentChat) - if err != nil { - ag.log.Error("failed to form agent msg", "error", err) - return nil, err + + // Chat completions endpoints + if isChat || !isCompletion { + chatBody := &models.ChatBody{ + Model: model, + Stream: false, // Agents don't need streaming + Messages: messages, + } + + if isDeepSeek { + // DeepSeek chat + req := models.NewDSChatReq(*chatBody) + return json.Marshal(req) + } else if isOpenRouter { + // OpenRouter chat + req := models.NewOpenRouterChatReq(*chatBody, defaultProps) + return json.Marshal(req) + } else { + // Assume llama.cpp chat (OpenAI format) + req := models.OpenAIReq{ + ChatBody: chatBody, + Tools: nil, + } + return json.Marshal(req) + } } - return bytes.NewReader(b), nil + + // Fallback (should not reach here) + 
ag.log.Warn("unknown API, using default chat completions format", "api", api) + chatBody := &models.ChatBody{ + Model: model, + Stream: false, // Agents don't need streaming + Messages: messages, + } + return json.Marshal(chatBody) } func (ag *AgentClient) LLMRequest(body io.Reader) ([]byte, error) { - req, err := http.NewRequest("POST", ag.cfg.CurrentAPI, body) + // Read the body for debugging (but we need to recreate it for the request) + bodyBytes, err := io.ReadAll(body) + if err != nil { + ag.log.Error("failed to read request body", "error", err) + return nil, err + } + + req, err := http.NewRequest("POST", ag.cfg.CurrentAPI, bytes.NewReader(bodyBytes)) if err != nil { - ag.log.Error("llamacpp api", "error", err) + ag.log.Error("failed to create request", "error", err) return nil, err } req.Header.Add("Accept", "application/json") req.Header.Add("Content-Type", "application/json") req.Header.Add("Authorization", "Bearer "+ag.getToken()) req.Header.Set("Accept-Encoding", "gzip") + + ag.log.Debug("agent LLM request", "url", ag.cfg.CurrentAPI, "body_preview", string(bodyBytes[:min(len(bodyBytes), 500)])) + resp, err := httpClient.Do(req) if err != nil { - ag.log.Error("llamacpp api", "error", err) + ag.log.Error("llamacpp api request failed", "error", err, "url", ag.cfg.CurrentAPI) return nil, err } defer resp.Body.Close() - return io.ReadAll(resp.Body) + + responseBytes, err := io.ReadAll(resp.Body) + if err != nil { + ag.log.Error("failed to read response", "error", err) + return nil, err + } + + if resp.StatusCode >= 400 { + ag.log.Error("agent LLM request failed", "status", resp.StatusCode, "response", string(responseBytes[:min(len(responseBytes), 1000)])) + return responseBytes, fmt.Errorf("HTTP %d: %s", resp.StatusCode, string(responseBytes[:min(len(responseBytes), 200)])) + } + + // Parse response and extract text content + text, err := extractTextFromResponse(responseBytes) + if err != nil { + ag.log.Error("failed to extract text from response", "error", err, "response_preview", string(responseBytes[:min(len(responseBytes), 500)])) + // Return raw response as fallback + return responseBytes, nil + } + + return []byte(text), nil +} + +// extractTextFromResponse parses common LLM response formats and extracts the text content. 
+func extractTextFromResponse(data []byte) (string, error) { + // Try to parse as generic JSON first + var genericResp map[string]interface{} + if err := json.Unmarshal(data, &genericResp); err != nil { + // Not JSON, return as string + return string(data), nil + } + + // Check for OpenAI chat completion format + if choices, ok := genericResp["choices"].([]interface{}); ok && len(choices) > 0 { + if firstChoice, ok := choices[0].(map[string]interface{}); ok { + // Chat completion: choices[0].message.content + if message, ok := firstChoice["message"].(map[string]interface{}); ok { + if content, ok := message["content"].(string); ok { + return content, nil + } + } + // Completion: choices[0].text + if text, ok := firstChoice["text"].(string); ok { + return text, nil + } + // Delta format for streaming (should not happen with stream: false) + if delta, ok := firstChoice["delta"].(map[string]interface{}); ok { + if content, ok := delta["content"].(string); ok { + return content, nil + } + } + } + } + + // Check for llama.cpp completion format + if content, ok := genericResp["content"].(string); ok { + return content, nil + } + + // Unknown format, return pretty-printed JSON + prettyJSON, err := json.MarshalIndent(genericResp, "", " ") + if err != nil { + return string(data), nil + } + return string(prettyJSON), nil +} + +func min(a, b int) int { + if a < b { + return a + } + return b } diff --git a/bot.go b/bot.go index 8206c63..779278e 100644 --- a/bot.go +++ b/bot.go @@ -6,6 +6,7 @@ import ( "context" "encoding/json" "fmt" + "strconv" "gf-lt/config" "gf-lt/extra" "gf-lt/models" @@ -659,14 +660,75 @@ func cleanChatBody() { } } +// convertJSONToMapStringString unmarshals JSON into map[string]interface{} and converts all values to strings. +func convertJSONToMapStringString(jsonStr string) (map[string]string, error) { + var raw map[string]interface{} + if err := json.Unmarshal([]byte(jsonStr), &raw); err != nil { + return nil, err + } + result := make(map[string]string, len(raw)) + for k, v := range raw { + switch val := v.(type) { + case string: + result[k] = val + case float64: + result[k] = strconv.FormatFloat(val, 'f', -1, 64) + case int, int64, int32: + // json.Unmarshal converts numbers to float64, but handle other integer types if they appear + result[k] = fmt.Sprintf("%v", val) + case bool: + result[k] = strconv.FormatBool(val) + case nil: + result[k] = "" + default: + result[k] = fmt.Sprintf("%v", val) + } + } + return result, nil +} + +// unmarshalFuncCall unmarshals a JSON tool call, converting numeric arguments to strings. 
+func unmarshalFuncCall(jsonStr string) (*models.FuncCall, error) { + type tempFuncCall struct { + ID string `json:"id,omitempty"` + Name string `json:"name"` + Args map[string]interface{} `json:"args"` + } + var temp tempFuncCall + if err := json.Unmarshal([]byte(jsonStr), &temp); err != nil { + return nil, err + } + fc := &models.FuncCall{ + ID: temp.ID, + Name: temp.Name, + Args: make(map[string]string, len(temp.Args)), + } + for k, v := range temp.Args { + switch val := v.(type) { + case string: + fc.Args[k] = val + case float64: + fc.Args[k] = strconv.FormatFloat(val, 'f', -1, 64) + case int, int64, int32: + fc.Args[k] = fmt.Sprintf("%v", val) + case bool: + fc.Args[k] = strconv.FormatBool(val) + case nil: + fc.Args[k] = "" + default: + fc.Args[k] = fmt.Sprintf("%v", val) + } + } + return fc, nil +} + func findCall(msg, toolCall string, tv *tview.TextView) { fc := &models.FuncCall{} if toolCall != "" { // HTML-decode the tool call string to handle encoded characters like < -> <= decodedToolCall := html.UnescapeString(toolCall) - openAIToolMap := make(map[string]string) - // respect tool call - if err := json.Unmarshal([]byte(decodedToolCall), &openAIToolMap); err != nil { + openAIToolMap, err := convertJSONToMapStringString(decodedToolCall) + if err != nil { logger.Error("failed to unmarshal openai tool call", "call", decodedToolCall, "error", err) // Send error response to LLM so it can retry or handle the error toolResponseMsg := models.RoleMsg{ @@ -700,7 +762,9 @@ func findCall(msg, toolCall string, tv *tview.TextView) { jsStr = strings.TrimSuffix(strings.TrimPrefix(jsStr, prefix), suffix) // HTML-decode the JSON string to handle encoded characters like < -> <= decodedJsStr := html.UnescapeString(jsStr) - if err := json.Unmarshal([]byte(decodedJsStr), &fc); err != nil { + var err error + fc, err = unmarshalFuncCall(decodedJsStr) + if err != nil { logger.Error("failed to unmarshal tool call", "error", err, "json_string", decodedJsStr) // Send error response to LLM so it can retry or handle the error toolResponseMsg := models.RoleMsg{ diff --git a/bot_test.go b/bot_test.go index 2d59c3c..d2956a9 100644 --- a/bot_test.go +++ b/bot_test.go @@ -152,4 +152,138 @@ func TestConsolidateConsecutiveAssistantMessages(t *testing.T) { } }) } +} + +func TestUnmarshalFuncCall(t *testing.T) { + tests := []struct { + name string + jsonStr string + want *models.FuncCall + wantErr bool + }{ + { + name: "simple websearch with numeric limit", + jsonStr: `{"name": "websearch", "args": {"query": "current weather in London", "limit": 3}}`, + want: &models.FuncCall{ + Name: "websearch", + Args: map[string]string{"query": "current weather in London", "limit": "3"}, + }, + wantErr: false, + }, + { + name: "string limit", + jsonStr: `{"name": "websearch", "args": {"query": "test", "limit": "5"}}`, + want: &models.FuncCall{ + Name: "websearch", + Args: map[string]string{"query": "test", "limit": "5"}, + }, + wantErr: false, + }, + { + name: "boolean arg", + jsonStr: `{"name": "test", "args": {"flag": true}}`, + want: &models.FuncCall{ + Name: "test", + Args: map[string]string{"flag": "true"}, + }, + wantErr: false, + }, + { + name: "null arg", + jsonStr: `{"name": "test", "args": {"opt": null}}`, + want: &models.FuncCall{ + Name: "test", + Args: map[string]string{"opt": ""}, + }, + wantErr: false, + }, + { + name: "float arg", + jsonStr: `{"name": "test", "args": {"ratio": 0.5}}`, + want: &models.FuncCall{ + Name: "test", + Args: map[string]string{"ratio": "0.5"}, + }, + wantErr: false, + }, + { + name: 
"invalid JSON", + jsonStr: `{invalid}`, + want: nil, + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := unmarshalFuncCall(tt.jsonStr) + if (err != nil) != tt.wantErr { + t.Errorf("unmarshalFuncCall() error = %v, wantErr %v", err, tt.wantErr) + return + } + if tt.wantErr { + return + } + if got.Name != tt.want.Name { + t.Errorf("unmarshalFuncCall() name = %v, want %v", got.Name, tt.want.Name) + } + if len(got.Args) != len(tt.want.Args) { + t.Errorf("unmarshalFuncCall() args length = %v, want %v", len(got.Args), len(tt.want.Args)) + } + for k, v := range tt.want.Args { + if got.Args[k] != v { + t.Errorf("unmarshalFuncCall() args[%v] = %v, want %v", k, got.Args[k], v) + } + } + }) + } +} + +func TestConvertJSONToMapStringString(t *testing.T) { + tests := []struct { + name string + jsonStr string + want map[string]string + wantErr bool + }{ + { + name: "simple map", + jsonStr: `{"query": "weather", "limit": 5}`, + want: map[string]string{"query": "weather", "limit": "5"}, + wantErr: false, + }, + { + name: "boolean and null", + jsonStr: `{"flag": true, "opt": null}`, + want: map[string]string{"flag": "true", "opt": ""}, + wantErr: false, + }, + { + name: "invalid JSON", + jsonStr: `{invalid`, + want: nil, + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := convertJSONToMapStringString(tt.jsonStr) + if (err != nil) != tt.wantErr { + t.Errorf("convertJSONToMapStringString() error = %v, wantErr %v", err, tt.wantErr) + return + } + if tt.wantErr { + return + } + if len(got) != len(tt.want) { + t.Errorf("convertJSONToMapStringString() length = %v, want %v", len(got), len(tt.want)) + } + for k, v := range tt.want { + if got[k] != v { + t.Errorf("convertJSONToMapStringString()[%v] = %v, want %v", k, got[k], v) + } + } + }) + } } \ No newline at end of file diff --git a/tools.go b/tools.go index e4af7ad..49d8192 100644 --- a/tools.go +++ b/tools.go @@ -13,6 +13,7 @@ import ( "regexp" "strconv" "strings" + "sync" "time" ) @@ -126,7 +127,9 @@ under the topic: Adam's number is stored: After that you are free to respond to the user. ` - basicCard = &models.CharCard{ + webSearchSysPrompt = `Summarize the web search results, extracting key information and presenting a concise answer. Provide sources and URLs where relevant.` + readURLSysPrompt = `Extract and summarize the content from the webpage. Provide key information, main points, and any relevant details.` + basicCard = &models.CharCard{ SysPrompt: basicSysMsg, FirstMsg: defaultFirstMsg, Role: "", @@ -141,8 +144,43 @@ After that you are free to respond to the user. // sysMap = map[string]string{"basic_sys": basicSysMsg, "tool_sys": toolSysMsg} sysMap = map[string]*models.CharCard{"basic_sys": basicCard} sysLabels = []string{"basic_sys"} + + webAgentClient *agent.AgentClient + webAgentClientOnce sync.Once + webAgentsOnce sync.Once ) +// getWebAgentClient returns a singleton AgentClient for web agents. +func getWebAgentClient() *agent.AgentClient { + webAgentClientOnce.Do(func() { + if cfg == nil { + panic("cfg not initialized") + } + if logger == nil { + panic("logger not initialized") + } + getToken := func() string { + if chunkParser == nil { + return "" + } + return chunkParser.GetToken() + } + webAgentClient = agent.NewAgentClient(cfg, *logger, getToken) + }) + return webAgentClient +} + +// registerWebAgents registers WebAgentB instances for websearch and read_url tools. 
+func registerWebAgents() { + webAgentsOnce.Do(func() { + client := getWebAgentClient() + // Register websearch agent + agent.Register("websearch", agent.NewWebAgentB(client, webSearchSysPrompt)) + // Register read_url agent + agent.Register("read_url", agent.NewWebAgentB(client, readURLSysPrompt)) + }) +} + // web search (depends on extra server) func websearch(args map[string]string) []byte { // make http request return bytes @@ -597,7 +635,6 @@ var globalTodoList = TodoList{ Items: []TodoItem{}, } - // Todo Management Tools func todoCreate(args map[string]string) []byte { task, ok := args["task"] @@ -851,6 +888,7 @@ var fnMap = map[string]fnSig{ // callToolWithAgent calls the tool and applies any registered agent. func callToolWithAgent(name string, args map[string]string) []byte { + registerWebAgents() f, ok := fnMap[name] if !ok { return []byte(fmt.Sprintf("tool %s not found", name)) @@ -862,16 +900,6 @@ func callToolWithAgent(name string, args map[string]string) []byte { return raw } -// registerDefaultAgents registers default agents for formatting. -func registerDefaultAgents() { - agent.Register("websearch", agent.DefaultFormatter("websearch")) - agent.Register("read_url", agent.DefaultFormatter("read_url")) -} - -func init() { - registerDefaultAgents() -} - // openai style def var baseTools = []models.Tool{ // websearch -- cgit v1.2.3 From 12f11388e72c4ce3314055d777f3034e31cdea77 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Fri, 19 Dec 2025 16:14:24 +0300 Subject: Feat: notify on load --- bot.go | 74 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/bot.go b/bot.go index 779278e..1603e0d 100644 --- a/bot.go +++ b/bot.go @@ -199,6 +199,18 @@ func warmUpModel() { if host != "localhost" && host != "127.0.0.1" && host != "::1" { return } + // Check if model is already loaded + loaded, err := isModelLoaded(chatBody.Model) + if err != nil { + logger.Debug("failed to check model status", "model", chatBody.Model, "error", err) + // Continue with warmup attempt anyway + } + if loaded { + if err := notifyUser("model already loaded", "Model "+chatBody.Model+" is already loaded."); err != nil { + logger.Debug("failed to notify user", "error", err) + } + return + } go func() { var data []byte var err error @@ -239,6 +251,8 @@ func warmUpModel() { return } resp.Body.Close() + // Start monitoring for model load completion + monitorModelLoad(chatBody.Model) }() } @@ -329,6 +343,66 @@ func fetchLCPModels() ([]string, error) { return localModels, nil } +// fetchLCPModelsWithStatus returns the full LCPModels struct including status information. +func fetchLCPModelsWithStatus() (*models.LCPModels, error) { + resp, err := http.Get(cfg.FetchModelNameAPI) + if err != nil { + return nil, err + } + defer resp.Body.Close() + if resp.StatusCode != 200 { + err := fmt.Errorf("failed to fetch llama.cpp models; status: %s", resp.Status) + return nil, err + } + data := &models.LCPModels{} + if err := json.NewDecoder(resp.Body).Decode(data); err != nil { + return nil, err + } + return data, nil +} + +// isModelLoaded checks if the given model ID is currently loaded in llama.cpp server. +func isModelLoaded(modelID string) (bool, error) { + models, err := fetchLCPModelsWithStatus() + if err != nil { + return false, err + } + for _, m := range models.Data { + if m.ID == modelID { + return m.Status.Value == "loaded", nil + } + } + return false, nil +} + +// monitorModelLoad starts a goroutine that periodically checks if the specified model is loaded. 
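+// It polls the model list every 2 seconds via isModelLoaded, notifies the user once the model reports "loaded", and gives up after a 2-minute timeout.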
+func monitorModelLoad(modelID string) { + go func() { + timeout := time.After(2 * time.Minute) // max wait 2 minutes + ticker := time.NewTicker(2 * time.Second) + defer ticker.Stop() + for { + select { + case <-timeout: + logger.Debug("model load monitoring timeout", "model", modelID) + return + case <-ticker.C: + loaded, err := isModelLoaded(modelID) + if err != nil { + logger.Debug("failed to check model status", "model", modelID, "error", err) + continue + } + if loaded { + if err := notifyUser("model loaded", "Model "+modelID+" is now loaded and ready."); err != nil { + logger.Debug("failed to notify user", "error", err) + } + return + } + } + } + }() +} + // sendMsgToLLM expects streaming resp func sendMsgToLLM(body io.Reader) { choseChunkParser() -- cgit v1.2.3 From 25229d7c6fc5ae6332421050baa5757efcedb267 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Fri, 19 Dec 2025 18:42:55 +0300 Subject: Enha: tool advice msg only after user --- llm.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/llm.go b/llm.go index 3a2cf9b..a557e3d 100644 --- a/llm.go +++ b/llm.go @@ -122,7 +122,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } } - if cfg.ToolUse && !resume { + if cfg.ToolUse && !resume && role == cfg.UserRole { // add to chat body chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) } @@ -358,7 +358,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader logger.Debug("DeepSeekerCompletion: RAG message added to chat body", "message_count", len(chatBody.Messages)) } } - if cfg.ToolUse && !resume { + if cfg.ToolUse && !resume && role == cfg.UserRole { // add to chat body chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) } @@ -420,7 +420,7 @@ func (ds DeepSeekerChat) GetToken() string { func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, error) { logger.Debug("formmsg deepseekerchat", "link", cfg.CurrentAPI) - if cfg.ToolUse && !resume { + if cfg.ToolUse && !resume && role == cfg.UserRole { // prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg // add to chat body chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) @@ -516,7 +516,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader logger.Debug("RAG message added to chat body", "message_count", len(chatBody.Messages)) } } - if cfg.ToolUse && !resume { + if cfg.ToolUse && !resume && role == cfg.UserRole { // add to chat body chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg}) } -- cgit v1.2.3 From 8c18b1b74cd53584988ab8cd55e50be81aa9aca5 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sat, 20 Dec 2025 11:04:57 +0300 Subject: Enha: remove old tool calls --- bot.go | 62 +++++++++++++++++++++++++++++++++++++----------------------- helpfuncs.go | 12 ++++++++++++ 2 files changed, 50 insertions(+), 24 deletions(-) diff --git a/bot.go b/bot.go index 1603e0d..054c310 100644 --- a/bot.go +++ b/bot.go @@ -6,7 +6,6 @@ import ( "context" "encoding/json" "fmt" - "strconv" "gf-lt/config" "gf-lt/extra" "gf-lt/models" @@ -20,6 +19,7 @@ import ( "net/url" "os" "path" + "strconv" "strings" "time" @@ -86,19 +86,31 @@ func cleanNullMessages(messages []models.RoleMsg) []models.RoleMsg { return 
consolidateConsecutiveAssistantMessages(messages)
 }
 
+func cleanToolCalls(messages []models.RoleMsg) []models.RoleMsg {
+	cleaned := make([]models.RoleMsg, 0, len(messages))
+	for i, msg := range messages {
+		// drop old tool-call responses, but keep the last msg even if it is a tool call
+		if msg.ToolCallID == "" {
+			cleaned = append(cleaned, msg)
+			continue
+		}
+		if i == len(messages)-1 {
+			cleaned = append(cleaned, msg)
+		}
+	}
+	return consolidateConsecutiveAssistantMessages(cleaned)
+}
+
 // consolidateConsecutiveAssistantMessages merges consecutive assistant messages into a single message
 func consolidateConsecutiveAssistantMessages(messages []models.RoleMsg) []models.RoleMsg {
 	if len(messages) == 0 {
 		return messages
 	}
-
 	consolidated := make([]models.RoleMsg, 0, len(messages))
 	currentAssistantMsg := models.RoleMsg{}
 	isBuildingAssistantMsg := false
-
 	for i := 0; i < len(messages); i++ {
 		msg := messages[i]
-
 		if msg.Role == cfg.AssistantRole || msg.Role == cfg.WriteNextMsgAsCompletionAgent {
 			// If this is an assistant message, start or continue building
 			if !isBuildingAssistantMsg {
@@ -143,12 +155,10 @@ func consolidateConsecutiveAssistantMessages(messages []models.RoleMsg) []models
 			consolidated = append(consolidated, msg)
 		}
 	}
-
 	// Don't forget the last assistant message if we were building one
 	if isBuildingAssistantMsg {
 		consolidated = append(consolidated, currentAssistantMsg)
 	}
-
 	return consolidated
 }
 
@@ -483,6 +493,7 @@ func sendMsgToLLM(body io.Reader) {
 			streamDone <- true
 			break
 		}
+		// // problem: this catches any mention of the word 'error'
 		// Handle error messages in response content
 		// example needed, since llm could use the word error in the normal msg
 		// if string(line) != "" && strings.Contains(strings.ToLower(string(line)), "error") {
@@ -691,20 +702,16 @@ out:
 			Role: botPersona, Content: respText.String(),
 		})
 	}
-
 	logger.Debug("chatRound: before cleanChatBody", "messages_before_clean", len(chatBody.Messages))
 	for i, msg := range chatBody.Messages {
 		logger.Debug("chatRound: before cleaning", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID)
 	}
-
 	// // Clean null/empty messages to prevent API issues with endpoints like llama.cpp jinja template
 	cleanChatBody()
-
 	logger.Debug("chatRound: after cleanChatBody", "messages_after_clean", len(chatBody.Messages))
 	for i, msg := range chatBody.Messages {
 		logger.Debug("chatRound: after cleaning", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID)
 	}
-
 	colorText()
 	updateStatusLine()
 	// bot msg is done;
@@ -718,19 +725,19 @@ out:
 
 // cleanChatBody removes messages with null or empty content to prevent API issues
 func cleanChatBody() {
-	if chatBody != nil && chatBody.Messages != nil {
-		originalLen := len(chatBody.Messages)
-		logger.Debug("cleanChatBody: before cleaning", "message_count", originalLen)
-		for i, msg := range chatBody.Messages {
-			logger.Debug("cleanChatBody: before clean", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", 
msg.ToolCallID) - } + if chatBody == nil || chatBody.Messages == nil { + return + } + originalLen := len(chatBody.Messages) + logger.Debug("cleanChatBody: before cleaning", "message_count", originalLen) + for i, msg := range chatBody.Messages { + logger.Debug("cleanChatBody: before clean", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID) + } + chatBody.Messages = cleanToolCalls(chatBody.Messages) + chatBody.Messages = cleanNullMessages(chatBody.Messages) + logger.Debug("cleanChatBody: after cleaning", "original_len", originalLen, "new_len", len(chatBody.Messages)) + for i, msg := range chatBody.Messages { + logger.Debug("cleanChatBody: after clean", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID) } } @@ -852,6 +859,14 @@ func findCall(msg, toolCall string, tv *tview.TextView) { return } } + // we got here => last msg recognized as a tool call (correct or not) + // make sure it has ToolCallID + if chatBody.Messages[len(chatBody.Messages)-1].ToolCallID == "" { + chatBody.Messages[len(chatBody.Messages)-1].ToolCallID = randString(6) + } + if lastToolCallID == "" { + lastToolCallID = chatBody.Messages[len(chatBody.Messages)-1].ToolCallID + } // call a func _, ok := fnMap[fc.Name] if !ok { @@ -866,7 +881,6 @@ func findCall(msg, toolCall string, tv *tview.TextView) { logger.Debug("findCall: added tool not implemented response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages)) // Clear the stored tool call ID after using it lastToolCallID = "" - // Trigger the assistant to continue processing with the new tool response // by calling chatRound with empty content to continue the assistant's response chatRound("", cfg.AssistantRole, tv, false, false) diff --git a/helpfuncs.go b/helpfuncs.go index 194d68c..30d9967 100644 --- a/helpfuncs.go +++ b/helpfuncs.go @@ -9,6 +9,8 @@ import ( "path" "strings" "unicode" + + "math/rand/v2" ) func isASCII(s string) bool { @@ -239,3 +241,13 @@ func makeStatusLine() string { isRecording, persona, botPersona, injectRole) return statusLine + imageInfo + shellModeInfo } + +var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") + +func randString(n int) string { + b := make([]rune, n) + for i := range b { + b[i] = letters[rand.IntN(len(letters))] + } + return string(b) +} -- cgit v1.2.3 From 0ca709b7c679c641724a3a8c2fc1425286b4955a Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sat, 20 Dec 2025 11:34:02 +0300 Subject: Enha: update lasttoolcallid --- bot.go | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/bot.go b/bot.go index 054c310..d84cfad 100644 --- a/bot.go +++ b/bot.go @@ -49,7 +49,6 @@ var ( ragger *rag.RAG chunkParser ChunkParser lastToolCall *models.FuncCall - lastToolCallID string // Store the ID of the most recent tool call //nolint:unused // TTS_ENABLED conditionally uses this orator extra.Orator asr extra.STT @@ -520,7 +519,7 @@ func sendMsgToLLM(body io.Reader) { if chunk.FuncName != "" { lastToolCall.Name = chunk.FuncName // Store the tool call ID for the response - lastToolCallID = chunk.ToolID + lastToolCall.ID = chunk.ToolID } interrupt: if interruptResp { // read bytes, so it would not get into beginning of the next req @@ -811,26 +810,25 @@ func findCall(msg, toolCall string, tv *tview.TextView) { 
openAIToolMap, err := convertJSONToMapStringString(decodedToolCall) if err != nil { logger.Error("failed to unmarshal openai tool call", "call", decodedToolCall, "error", err) + // Ensure lastToolCall.ID is set for the error response (already set from chunk) // Send error response to LLM so it can retry or handle the error toolResponseMsg := models.RoleMsg{ Role: cfg.ToolRole, Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err), - ToolCallID: lastToolCallID, // Use the stored tool call ID + ToolCallID: lastToolCall.ID, // Use the stored tool call ID } chatBody.Messages = append(chatBody.Messages, toolResponseMsg) - // Clear the stored tool call ID after using it - lastToolCallID = "" + // Clear the stored tool call ID after using it (no longer needed) // Trigger the assistant to continue processing with the error message chatRound("", cfg.AssistantRole, tv, false, false) return } lastToolCall.Args = openAIToolMap fc = lastToolCall - // Ensure lastToolCallID is set if it's available in the tool call - if lastToolCallID == "" && len(openAIToolMap) > 0 { - // Attempt to extract ID from the parsed tool call if not already set + // Set lastToolCall.ID from parsed tool call ID if available + if len(openAIToolMap) > 0 { if id, exists := openAIToolMap["id"]; exists { - lastToolCallID = id + lastToolCall.ID = id } } } else { @@ -858,14 +856,19 @@ func findCall(msg, toolCall string, tv *tview.TextView) { chatRound("", cfg.AssistantRole, tv, false, false) return } + // Update lastToolCall with parsed function call + lastToolCall.ID = fc.ID + lastToolCall.Name = fc.Name + lastToolCall.Args = fc.Args } // we got here => last msg recognized as a tool call (correct or not) // make sure it has ToolCallID if chatBody.Messages[len(chatBody.Messages)-1].ToolCallID == "" { chatBody.Messages[len(chatBody.Messages)-1].ToolCallID = randString(6) } - if lastToolCallID == "" { - lastToolCallID = chatBody.Messages[len(chatBody.Messages)-1].ToolCallID + // Ensure lastToolCall.ID is set, fallback to assistant message's ToolCallID + if lastToolCall.ID == "" { + lastToolCall.ID = chatBody.Messages[len(chatBody.Messages)-1].ToolCallID } // call a func _, ok := fnMap[fc.Name] @@ -875,12 +878,12 @@ func findCall(msg, toolCall string, tv *tview.TextView) { toolResponseMsg := models.RoleMsg{ Role: cfg.ToolRole, Content: m, - ToolCallID: lastToolCallID, // Use the stored tool call ID + ToolCallID: lastToolCall.ID, // Use the stored tool call ID } chatBody.Messages = append(chatBody.Messages, toolResponseMsg) logger.Debug("findCall: added tool not implemented response", "role", toolResponseMsg.Role, "content_len", len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages)) // Clear the stored tool call ID after using it - lastToolCallID = "" + lastToolCall.ID = "" // Trigger the assistant to continue processing with the new tool response // by calling chatRound with empty content to continue the assistant's response chatRound("", cfg.AssistantRole, tv, false, false) @@ -895,12 +898,12 @@ func findCall(msg, toolCall string, tv *tview.TextView) { toolResponseMsg := models.RoleMsg{ Role: cfg.ToolRole, Content: toolMsg, - ToolCallID: lastToolCallID, // Use the stored tool call ID + ToolCallID: lastToolCall.ID, // Use the stored tool call ID } chatBody.Messages = append(chatBody.Messages, toolResponseMsg) logger.Debug("findCall: added actual tool response", "role", toolResponseMsg.Role, "content_len", 
len(toolResponseMsg.Content), "tool_call_id", toolResponseMsg.ToolCallID, "message_count_after_add", len(chatBody.Messages)) // Clear the stored tool call ID after using it - lastToolCallID = "" + lastToolCall.ID = "" // Trigger the assistant to continue processing with the new tool response // by calling chatRound with empty content to continue the assistant's response chatRound("", cfg.AssistantRole, tv, false, false) -- cgit v1.2.3 From ba3330ee54bcab5cfde470f8e465fc9ed1c6cb2c Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sat, 20 Dec 2025 14:21:40 +0300 Subject: Fix: model load if llama.cpp started after gf-lt --- bot.go | 22 ++++++++++++++++++++++ props_table.go | 7 +++++++ tui.go | 5 +++++ 3 files changed, 34 insertions(+) diff --git a/bot.go b/bot.go index d84cfad..7c8ea75 100644 --- a/bot.go +++ b/bot.go @@ -21,6 +21,7 @@ import ( "path" "strconv" "strings" + "sync" "time" "github.com/neurosnap/sentences/english" @@ -52,6 +53,7 @@ var ( //nolint:unused // TTS_ENABLED conditionally uses this orator extra.Orator asr extra.STT + localModelsMu sync.RWMutex defaultLCPProps = map[string]float32{ "temperature": 0.8, "dry_multiplier": 0.0, @@ -1002,12 +1004,32 @@ func updateModelLists() { } } // if llama.cpp started after gf-lt? + localModelsMu.Lock() LocalModels, err = fetchLCPModels() + localModelsMu.Unlock() if err != nil { logger.Warn("failed to fetch llama.cpp models", "error", err) } } +func refreshLocalModelsIfEmpty() { + localModelsMu.RLock() + if len(LocalModels) > 0 { + localModelsMu.RUnlock() + return + } + localModelsMu.RUnlock() + // try to fetch + models, err := fetchLCPModels() + if err != nil { + logger.Warn("failed to fetch llama.cpp models", "error", err) + return + } + localModelsMu.Lock() + LocalModels = models + localModelsMu.Unlock() +} + func init() { var err error cfg, err = config.LoadConfig("config.toml") diff --git a/props_table.go b/props_table.go index ae225d8..0c49056 100644 --- a/props_table.go +++ b/props_table.go @@ -5,11 +5,14 @@ import ( "slices" "strconv" "strings" + "sync" "github.com/gdamore/tcell/v2" "github.com/rivo/tview" ) +var _ = sync.RWMutex{} + // Define constants for cell types const ( CellTypeCheckbox = "checkbox" @@ -138,6 +141,10 @@ func makePropsTable(props map[string]float32) *tview.Table { } else if strings.Contains(api, "openrouter.ai") { return ORFreeModels } + // Assume local llama.cpp + refreshLocalModelsIfEmpty() + localModelsMu.RLock() + defer localModelsMu.RUnlock() return LocalModels } var modelRowIndex int // will be set before model row is added diff --git a/tui.go b/tui.go index d3ce14e..4313d26 100644 --- a/tui.go +++ b/tui.go @@ -12,11 +12,14 @@ import ( "path" "strconv" "strings" + "sync" "github.com/gdamore/tcell/v2" "github.com/rivo/tview" ) +var _ = sync.RWMutex{} + var ( app *tview.Application pages *tview.Pages @@ -988,11 +991,13 @@ func init() { } updateStatusLine() } else { + localModelsMu.RLock() if len(LocalModels) > 0 { currentLocalModelIndex = (currentLocalModelIndex + 1) % len(LocalModels) chatBody.Model = LocalModels[currentLocalModelIndex] cfg.CurrentModel = chatBody.Model } + localModelsMu.RUnlock() updateStatusLine() // // For non-OpenRouter APIs, use the old logic // go func() { -- cgit v1.2.3 From 8b19e5a3182aa55d565a2909b0acbae39bdf8d3c Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 21 Dec 2025 09:39:37 +0300 Subject: Enha: add ts columns for chat table --- tables.go | 65 +++++++++++++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 57 insertions(+), 8 deletions(-) diff --git 
a/tables.go b/tables.go index 8c10a2c..87c3bbb 100644 --- a/tables.go +++ b/tables.go @@ -23,43 +23,92 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table { chatList[i] = name i++ } - rows, cols := len(chatMap), len(actions)+2 + + // Add 1 extra row for header + rows, cols := len(chatMap)+1, len(actions)+4 // +2 for name, +2 for timestamps chatActTable := tview.NewTable(). SetBorders(true) - for r := 0; r < rows; r++ { + + // Add header row (row 0) + for c := 0; c < cols; c++ { + color := tcell.ColorWhite + headerText := "" + switch c { + case 0: + headerText = "Chat Name" + case 1: + headerText = "Preview" + case 2: + headerText = "Created At" + case 3: + headerText = "Updated At" + default: + headerText = actions[c-4] + } + chatActTable.SetCell(0, c, + tview.NewTableCell(headerText). + SetSelectable(false). + SetTextColor(color). + SetAlign(tview.AlignCenter). + SetAttributes(tcell.AttrBold)) + } + + // Add data rows (starting from row 1) + for r := 0; r < rows-1; r++ { // rows-1 because we added a header row for c := 0; c < cols; c++ { color := tcell.ColorWhite switch c { case 0: - chatActTable.SetCell(r, c, + chatActTable.SetCell(r+1, c, // +1 to account for header row tview.NewTableCell(chatList[r]). SetSelectable(false). SetTextColor(color). SetAlign(tview.AlignCenter)) case 1: - chatActTable.SetCell(r, c, + chatActTable.SetCell(r+1, c, // +1 to account for header row tview.NewTableCell(chatMap[chatList[r]].Msgs[len(chatMap[chatList[r]].Msgs)-30:]). SetSelectable(false). SetTextColor(color). SetAlign(tview.AlignCenter)) + case 2: + // Created At column + chatActTable.SetCell(r+1, c, // +1 to account for header row + tview.NewTableCell(chatMap[chatList[r]].CreatedAt.Format("2006-01-02 15:04")). + SetSelectable(false). + SetTextColor(color). + SetAlign(tview.AlignCenter)) + case 3: + // Updated At column + chatActTable.SetCell(r+1, c, // +1 to account for header row + tview.NewTableCell(chatMap[chatList[r]].UpdatedAt.Format("2006-01-02 15:04")). + SetSelectable(false). + SetTextColor(color). + SetAlign(tview.AlignCenter)) default: - chatActTable.SetCell(r, c, - tview.NewTableCell(actions[c-2]). + chatActTable.SetCell(r+1, c, // +1 to account for header row + tview.NewTableCell(actions[c-4]). // Adjusted offset to account for 2 new timestamp columns SetTextColor(color). 
SetAlign(tview.AlignCenter)) } } } - chatActTable.Select(0, 0).SetSelectable(true, true).SetFixed(1, 1).SetDoneFunc(func(key tcell.Key) { + chatActTable.Select(1, 0).SetSelectable(true, true).SetFixed(1, 1).SetDoneFunc(func(key tcell.Key) { if key == tcell.KeyEsc || key == tcell.KeyF1 || key == tcell.Key('x') { pages.RemovePage(historyPage) return } }).SetSelectedFunc(func(row int, column int) { + // Skip header row (row 0) for selection + if row == 0 { + // If user clicks on header, just return without action + chatActTable.Select(1, column) // Move selection to first data row + return + } + tc := chatActTable.GetCell(row, column) tc.SetTextColor(tcell.ColorRed) chatActTable.SetSelectable(false, false) - selectedChat := chatList[row] + selectedChat := chatList[row-1] // -1 to account for header row defer pages.RemovePage(historyPage) switch tc.Text { case "load": -- cgit v1.2.3 From 5525c946613a6f726cd116d79f1505a63ab25806 Mon Sep 17 00:00:00 2001 From: Grail Finder Date: Sun, 21 Dec 2025 09:46:07 +0300 Subject: Chore: add empty line between messages --- bot.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot.go b/bot.go index 7c8ea75..8ddcee5 100644 --- a/bot.go +++ b/bot.go @@ -734,6 +734,8 @@ func cleanChatBody() { for i, msg := range chatBody.Messages { logger.Debug("cleanChatBody: before clean", "index", i, "role", msg.Role, "content_len", len(msg.Content), "has_content", msg.HasContent(), "tool_call_id", msg.ToolCallID) } + // TODO: consider case where we keep tool requests + // /completion msg where part meant for user and other part tool call chatBody.Messages = cleanToolCalls(chatBody.Messages) chatBody.Messages = cleanNullMessages(chatBody.Messages) logger.Debug("cleanChatBody: after cleaning", "original_len", originalLen, "new_len", len(chatBody.Messages)) @@ -925,7 +927,7 @@ func chatToTextSlice(showSys bool) []string { func chatToText(showSys bool) string { s := chatToTextSlice(showSys) - return strings.Join(s, "") + return strings.Join(s, "\n") } func removeThinking(chatBody *models.ChatBody) { -- cgit v1.2.3