 agent/request.go     | 14
 bot.go               | 21
 bot_test.go          |  2
 llm.go               | 12
 models/db.go         |  2
 models/models.go     | 32
 models/openrouter.go |  3
 pngmeta/altwriter.go |  2
 props_table.go       |  7
 rag/rag.go           |  4
 tables.go            | 65
 tui.go               | 25
 12 files changed, 98 insertions, 91 deletions
diff --git a/agent/request.go b/agent/request.go
index bb4a80d..14009dd 100644
--- a/agent/request.go
+++ b/agent/request.go
@@ -77,17 +77,18 @@ func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) {
 	}
 	prompt := strings.TrimSpace(sb.String())
-	if isDeepSeek {
+	switch {
+	case isDeepSeek:
 		// DeepSeek completion
 		req := models.NewDSCompletionReq(prompt, model, defaultProps["temperature"], []string{})
 		req.Stream = false // Agents don't need streaming
 		return json.Marshal(req)
-	} else if isOpenRouter {
+	case isOpenRouter:
 		// OpenRouter completion
 		req := models.NewOpenRouterCompletionReq(model, prompt, defaultProps, []string{})
 		req.Stream = false // Agents don't need streaming
 		return json.Marshal(req)
-	} else {
+	default:
 		// Assume llama.cpp completion
 		req := models.NewLCPReq(prompt, model, nil, defaultProps, []string{})
 		req.Stream = false // Agents don't need streaming
@@ -103,15 +104,16 @@ func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) {
 		Messages: messages,
 	}
-	if isDeepSeek {
+	switch {
+	case isDeepSeek:
 		// DeepSeek chat
 		req := models.NewDSChatReq(*chatBody)
 		return json.Marshal(req)
-	} else if isOpenRouter {
+	case isOpenRouter:
 		// OpenRouter chat
 		req := models.NewOpenRouterChatReq(*chatBody, defaultProps)
 		return json.Marshal(req)
-	} else {
+	default:
 		// Assume llama.cpp chat (OpenAI format)
 		req := models.OpenAIReq{
 			ChatBody: chatBody,
diff --git a/bot.go b/bot.go
--- a/bot.go
+++ b/bot.go
@@ -113,7 +113,7 @@ func parseKnownToTag(content string) []string {
 // processMessageTag processes a message for known_to tag and sets KnownTo field.
 // It also ensures the sender's role is included in KnownTo.
 // If KnownTo already set (e.g., from DB), preserves it unless new tag found.
-func processMessageTag(msg models.RoleMsg) models.RoleMsg {
+func processMessageTag(msg *models.RoleMsg) *models.RoleMsg {
 	if cfg == nil || !cfg.CharSpecificContextEnabled {
 		return msg
 	}
@@ -297,7 +297,8 @@ func warmUpModel() {
 	go func() {
 		var data []byte
 		var err error
-		if strings.HasSuffix(cfg.CurrentAPI, "/completion") {
+		switch {
+		case strings.HasSuffix(cfg.CurrentAPI, "/completion"):
 			// Old completion endpoint
 			req := models.NewLCPReq(".", chatBody.Model, nil, map[string]float32{
 				"temperature": 0.8,
@@ -307,7 +308,7 @@
 			}, []string{})
 			req.Stream = false
 			data, err = json.Marshal(req)
-		} else if strings.Contains(cfg.CurrentAPI, "/v1/chat/completions") {
+		case strings.Contains(cfg.CurrentAPI, "/v1/chat/completions"):
 			// OpenAI-compatible chat endpoint
 			req := models.OpenAIReq{
 				ChatBody: &models.ChatBody{
@@ -320,7 +321,7 @@
 				Tools: nil,
 			}
 			data, err = json.Marshal(req)
-		} else {
+		default:
 			// Unknown local endpoint, skip
 			return
 		}
@@ -861,14 +862,14 @@ out:
 		// lastM.Content = lastM.Content + respText.String()
 		// Process the updated message to check for known_to tags in resumed response
 		updatedMsg := chatBody.Messages[len(chatBody.Messages)-1]
-		processedMsg := processMessageTag(updatedMsg)
-		chatBody.Messages[len(chatBody.Messages)-1] = processedMsg
+		processedMsg := processMessageTag(&updatedMsg)
+		chatBody.Messages[len(chatBody.Messages)-1] = *processedMsg
 	} else {
 		newMsg := models.RoleMsg{
 			Role:    botPersona,
 			Content: respText.String(),
 		}
 		// Process the new message to check for known_to tags in LLM response
-		newMsg = processMessageTag(newMsg)
+		newMsg = *processMessageTag(&newMsg)
 		chatBody.Messages = append(chatBody.Messages, newMsg)
 	}
 	cleanChatBody()
@@ -889,7 +890,7 @@
 	if cfg.AutoTurn {
 		lastMsg := chatBody.Messages[len(chatBody.Messages)-1]
 		if len(lastMsg.KnownTo) > 0 {
-			triggerPrivateMessageResponses(lastMsg)
+			triggerPrivateMessageResponses(&lastMsg)
 		}
 	}
 	return nil
@@ -970,7 +971,7 @@ func unmarshalFuncCall(jsonStr string) (*models.FuncCall, error) {
 
 // findCall: adds chatRoundReq into the chatRoundChan and returns true if does
 func findCall(msg, toolCall string) bool {
-	fc := &models.FuncCall{}
+	var fc *models.FuncCall
 	if toolCall != "" {
 		// HTML-decode the tool call string to handle encoded characters like < -> <=
 		decodedToolCall := html.UnescapeString(toolCall)
@@ -1306,7 +1307,7 @@ func init() {
 
 // triggerPrivateMessageResponses checks if a message was sent privately to specific characters
 // and triggers those non-user characters to respond
-func triggerPrivateMessageResponses(msg models.RoleMsg) {
+func triggerPrivateMessageResponses(msg *models.RoleMsg) {
 	if cfg == nil || !cfg.CharSpecificContextEnabled {
 		return
 	}
diff --git a/bot_test.go b/bot_test.go
index 4cbe953..1710003 100644
--- a/bot_test.go
+++ b/bot_test.go
@@ -506,7 +506,7 @@ func TestProcessMessageTag(t *testing.T) {
 				CharSpecificContextTag: tt.tag,
 			}
 			cfg = testCfg
-			got := processMessageTag(tt.msg)
+			got := processMessageTag(&tt.msg)
 			if len(got.KnownTo) != len(tt.wantMsg.KnownTo) {
 				t.Errorf("processMessageTag() KnownTo length = %v, want %v", len(got.KnownTo), len(tt.wantMsg.KnownTo))
 				t.Logf("got: %v", got.KnownTo)
diff --git a/llm.go b/llm.go
--- a/llm.go
+++ b/llm.go
@@ -159,7 +159,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
 	}
 	if msg != "" { // otherwise let the bot to continue
 		newMsg := models.RoleMsg{Role: role, Content: msg}
-		newMsg = processMessageTag(newMsg)
+		newMsg = *processMessageTag(&newMsg)
 		chatBody.Messages = append(chatBody.Messages, newMsg)
 	}
 	if !resume {
@@ -319,7 +319,7 @@ func (op LCPChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
 			// Create a simple text message
 			newMsg = models.NewRoleMsg(role, msg)
 		}
-		newMsg = processMessageTag(newMsg)
+		newMsg = *processMessageTag(&newMsg)
 		chatBody.Messages = append(chatBody.Messages, newMsg)
 		logger.Debug("LCPChat FormMsg: added message to chatBody", "role", newMsg.Role,
 			"content_len", len(newMsg.Content), "message_count_after_add", len(chatBody.Messages))
@@ -413,7 +413,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
 	}
 	if msg != "" { // otherwise let the bot to continue
 		newMsg := models.RoleMsg{Role: role, Content: msg}
-		newMsg = processMessageTag(newMsg)
+		newMsg = *processMessageTag(&newMsg)
 		chatBody.Messages = append(chatBody.Messages, newMsg)
 	}
 	if !resume {
@@ -504,7 +504,7 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
 	}
 	if msg != "" { // otherwise let the bot continue
 		newMsg := models.RoleMsg{Role: role, Content: msg}
-		newMsg = processMessageTag(newMsg)
+		newMsg = *processMessageTag(&newMsg)
 		chatBody.Messages = append(chatBody.Messages, newMsg)
 	}
 	if !resume {
@@ -586,7 +586,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
 	logger.Debug("formmsg openroutercompletion", "link", cfg.CurrentAPI)
 	if msg != "" { // otherwise let the bot to continue
 		newMsg := models.RoleMsg{Role: role, Content: msg}
-		newMsg = processMessageTag(newMsg)
+		newMsg = *processMessageTag(&newMsg)
 		chatBody.Messages = append(chatBody.Messages, newMsg)
 	}
 	if !resume {
@@ -707,7 +707,7 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
 			// Create a simple text message
 			newMsg = models.NewRoleMsg(role, msg)
 		}
-		newMsg = processMessageTag(newMsg)
+		newMsg = *processMessageTag(&newMsg)
 		chatBody.Messages = append(chatBody.Messages, newMsg)
 	}
 	if !resume {
diff --git a/models/db.go b/models/db.go
index 090f46d..73a0b53 100644
--- a/models/db.go
+++ b/models/db.go
@@ -14,7 +14,7 @@ type Chat struct {
 	UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
 }
 
-func (c Chat) ToHistory() ([]RoleMsg, error) {
+func (c *Chat) ToHistory() ([]RoleMsg, error) {
 	resp := []RoleMsg{}
 	if err := json.Unmarshal([]byte(c.Msgs), &resp); err != nil {
 		return nil, err
diff --git a/models/models.go b/models/models.go
index 4133a7c..34e3dcf 100644
--- a/models/models.go
+++ b/models/models.go
@@ -98,7 +98,7 @@ type RoleMsg struct {
 }
 
 // MarshalJSON implements custom JSON marshaling for RoleMsg
-func (m RoleMsg) MarshalJSON() ([]byte, error) {
+func (m *RoleMsg) MarshalJSON() ([]byte, error) {
 	if m.hasContentParts {
 		// Use structured content format
 		aux := struct {
@@ -166,11 +166,11 @@ func (m *RoleMsg) UnmarshalJSON(data []byte) error {
 	return nil
 }
 
-func (m RoleMsg) ToText(i int) string {
+func (m *RoleMsg) ToText(i int) string {
 	icon := fmt.Sprintf("(%d)", i)
 
 	// Convert content to string representation
-	contentStr := ""
+	var contentStr string
 	if !m.hasContentParts {
 		contentStr = m.Content
 	} else {
@@ -198,8 +198,8 @@ func (m RoleMsg) ToText(i int) string {
 	return strings.ReplaceAll(textMsg, "\n\n", "\n")
 }
 
-func (m RoleMsg) ToPrompt() string {
-	contentStr := ""
+func (m *RoleMsg) ToPrompt() string {
+	var contentStr string
 	if !m.hasContentParts {
 		contentStr = m.Content
 	} else {
@@ -240,7 +240,7 @@ func NewMultimodalMsg(role string, contentParts []interface{}) RoleMsg {
 }
 
 // HasContent returns true if the message has either string content or structured content parts
-func (m RoleMsg) HasContent() bool {
+func (m *RoleMsg) HasContent() bool {
 	if m.Content != "" {
 		return true
 	}
@@ -251,17 +251,17 @@ func (m RoleMsg) HasContent() bool {
 }
 
 // IsContentParts returns true if the message uses structured content parts
-func (m RoleMsg) IsContentParts() bool {
+func (m *RoleMsg) IsContentParts() bool {
 	return m.hasContentParts
 }
 
 // GetContentParts returns the content parts of the message
-func (m RoleMsg) GetContentParts() []interface{} {
+func (m *RoleMsg) GetContentParts() []interface{} {
 	return m.ContentParts
 }
 
 // Copy creates a copy of the RoleMsg with all fields
-func (m RoleMsg) Copy() RoleMsg {
+func (m *RoleMsg) Copy() RoleMsg {
 	return RoleMsg{
 		Role:    m.Role,
 		Content: m.Content,
@@ -382,12 +382,14 @@ func (cb *ChatBody) MakeStopSliceExcluding(
 			continue
 		}
 		// Add multiple variations to catch different formatting
-		ss = append(ss, role+":\n")   // Most common: role with newline
-		ss = append(ss, role+":")     // Role with colon but no newline
-		ss = append(ss, role+": ")    // Role with colon and single space
-		ss = append(ss, role+":  ")   // Role with colon and double space (common tokenization)
-		ss = append(ss, role+":  \n") // Role with colon and double space (common tokenization)
-		ss = append(ss, role+":   ")  // Role with colon and triple space
+		ss = append(ss,
+			role+":\n",   // Most common: role with newline
+			role+":",     // Role with colon but no newline
+			role+": ",    // Role with colon and single space
+			role+":  ",   // Role with colon and double space (common tokenization)
+			role+":  \n", // Role with colon and double space (common tokenization)
+			role+":   ",  // Role with colon and triple space
+		)
 	}
 	return ss
 }
diff --git a/models/openrouter.go b/models/openrouter.go
index 29ba0d8..6196498 100644
--- a/models/openrouter.go
+++ b/models/openrouter.go
@@ -143,7 +143,8 @@ type ORModels struct {
 
 func (orm *ORModels) ListModels(free bool) []string {
 	resp := []string{}
-	for _, model := range orm.Data {
+	for i := range orm.Data {
+		model := &orm.Data[i] // Take address of element to avoid copying
 		if free {
 			if model.Pricing.Prompt == "0" && model.Pricing.Completion == "0" {
 				// treat missing request as free
diff --git a/pngmeta/altwriter.go b/pngmeta/altwriter.go
index 206b563..76cb709 100644
--- a/pngmeta/altwriter.go
+++ b/pngmeta/altwriter.go
@@ -120,7 +120,7 @@ func createTextChunk(embed PngEmbed) ([]byte, error) {
 	if err := binary.Write(chunk, binary.BigEndian, uint32(len(data))); err != nil {
 		return nil, fmt.Errorf("error writing chunk length: %w", err)
 	}
-	if _, err := chunk.Write([]byte(textChunkType)); err != nil {
+	if _, err := chunk.WriteString(textChunkType); err != nil {
 		return nil, fmt.Errorf("error writing chunk type: %w", err)
 	}
 	if _, err := chunk.Write(data); err != nil {
diff --git a/props_table.go b/props_table.go
index d037bb0..50c8886 100644
--- a/props_table.go
+++ b/props_table.go
@@ -313,11 +313,12 @@ func makePropsTable(props map[string]float32) *tview.Table {
 			logger.Warn("empty options list for", "label", label, "api", cfg.CurrentAPI, "localModelsLen", len(LocalModels), "orModelsLen", len(ORFreeModels))
 			message := "No options available for " + label
 			if label == "Select a model" {
-				if strings.Contains(cfg.CurrentAPI, "openrouter.ai") {
+				switch {
+				case strings.Contains(cfg.CurrentAPI, "openrouter.ai"):
 					message = "No OpenRouter models available. Check token and connection."
-				} else if strings.Contains(cfg.CurrentAPI, "api.deepseek.com") {
+				case strings.Contains(cfg.CurrentAPI, "api.deepseek.com"):
 					message = "DeepSeek models should be available. Please report bug."
-				} else {
+				default:
 					message = "No llama.cpp models loaded. Ensure llama.cpp server is running with models."
 				}
 			}
diff --git a/rag/rag.go b/rag/rag.go
--- a/rag/rag.go
+++ b/rag/rag.go
@@ -107,7 +107,7 @@ func (r *RAG) LoadRAG(fpath string) error {
 	}
 
 	// Adjust batch size if needed
-	if len(paragraphs) < int(r.cfg.RAGBatchSize) && len(paragraphs) > 0 {
+	if len(paragraphs) < r.cfg.RAGBatchSize && len(paragraphs) > 0 {
 		r.cfg.RAGBatchSize = len(paragraphs)
 	}
 
@@ -133,7 +133,7 @@ func (r *RAG) LoadRAG(fpath string) error {
 	ctn := 0
 	totalParagraphs := len(paragraphs)
 	for {
-		if int(right) > totalParagraphs {
+		if right > totalParagraphs {
 			batchCh <- map[int][]string{left: paragraphs[left:]}
 			break
 		}
diff --git a/tables.go b/tables.go
--- a/tables.go
+++ b/tables.go
@@ -30,7 +30,7 @@ func makeChatTable(chatMap map[string]models.Chat) *tview.Table {
 	// Add header row (row 0)
 	for c := 0; c < cols; c++ {
 		color := tcell.ColorWhite
-		headerText := ""
+		var headerText string
 		switch c {
 		case 0:
 			headerText = "Chat Name"
@@ -259,19 +259,20 @@ func makeRAGTable(fileList []string) *tview.Flex {
 	for r := 0; r < rows; r++ {
 		for c := 0; c < cols; c++ {
 			color := tcell.ColorWhite
-			if c < 1 {
+			switch {
+			case c < 1:
 				fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
 					tview.NewTableCell(fileList[r]).
 						SetTextColor(color).
 						SetAlign(tview.AlignCenter).
 						SetSelectable(false))
-			} else if c == 1 { // Action description column - not selectable
+			case c == 1: // Action description column - not selectable
 				fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
 					tview.NewTableCell("(Action)").
 						SetTextColor(color).
 						SetAlign(tview.AlignCenter).
 						SetSelectable(false))
-			} else { // Action button column - selectable
+			default: // Action button column - selectable
 				fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
 					tview.NewTableCell(actions[c-1]).
 						SetTextColor(color).
@@ -406,19 +407,20 @@ func makeLoadedRAGTable(fileList []string) *tview.Flex {
 	for r := 0; r < rows; r++ {
 		for c := 0; c < cols; c++ {
 			color := tcell.ColorWhite
-			if c < 1 {
+			switch {
+			case c < 1:
 				fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
 					tview.NewTableCell(fileList[r]).
 						SetTextColor(color).
 						SetAlign(tview.AlignCenter).
 						SetSelectable(false))
-			} else if c == 1 { // Action description column - not selectable
+			case c == 1: // Action description column - not selectable
 				fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
 					tview.NewTableCell("(Action)").
 						SetTextColor(color).
 						SetAlign(tview.AlignCenter).
 						SetSelectable(false))
-			} else { // Action button column - selectable
+			default: // Action button column - selectable
 				fileTable.SetCell(r+1, c, // +1 to account for the exit row at index 0
 					tview.NewTableCell(actions[c-1]).
 						SetTextColor(color).
@@ -487,13 +489,14 @@ func makeAgentTable(agentList []string) *tview.Table {
 	for r := 0; r < rows; r++ {
 		for c := 0; c < cols; c++ {
 			color := tcell.ColorWhite
-			if c < 1 {
+			switch {
+			case c < 1:
 				chatActTable.SetCell(r, c,
 					tview.NewTableCell(agentList[r]).
 						SetTextColor(color).
 						SetAlign(tview.AlignCenter).
 						SetSelectable(false))
-			} else if c == 1 {
+			case c == 1:
 				if actions[c-1] == "filepath" {
 					cc, ok := sysMap[agentList[r]]
 					if !ok {
@@ -510,7 +513,7 @@ func makeAgentTable(agentList []string) *tview.Table {
 					tview.NewTableCell(actions[c-1]).
 						SetTextColor(color).
 						SetAlign(tview.AlignCenter))
-			} else {
+			default:
 				chatActTable.SetCell(r, c,
 					tview.NewTableCell(actions[c-1]).
 						SetTextColor(color).
@@ -600,13 +603,14 @@ func makeCodeBlockTable(codeBlocks []string) *tview.Table {
 			if len(codeBlocks[r]) < 30 {
 				previewLen = len(codeBlocks[r])
 			}
-			if c < 1 {
+			switch {
+			case c < 1:
 				table.SetCell(r, c,
 					tview.NewTableCell(codeBlocks[r][:previewLen]).
 						SetTextColor(color).
 						SetAlign(tview.AlignCenter).
 						SetSelectable(false))
-			} else {
+			default:
 				table.SetCell(r, c,
 					tview.NewTableCell(actions[c-1]).
 						SetTextColor(color).
@@ -671,13 +675,14 @@ func makeImportChatTable(filenames []string) *tview.Table {
 	for r := 0; r < rows; r++ {
 		for c := 0; c < cols; c++ {
 			color := tcell.ColorWhite
-			if c < 1 {
+			switch {
+			case c < 1:
 				chatActTable.SetCell(r, c,
 					tview.NewTableCell(filenames[r]).
 						SetTextColor(color).
 						SetAlign(tview.AlignCenter).
 						SetSelectable(false))
-			} else {
+			default:
 				chatActTable.SetCell(r, c,
 					tview.NewTableCell(actions[c-1]).
 						SetTextColor(color).
@@ -861,25 +866,23 @@ func makeFilePicker() *tview.Flex {
 				currentStackPos = len(dirStack) - 1
 				statusView.SetText("Current: " + newDir)
 			})
-		} else {
+		} else if hasAllowedExtension(name) {
 			// Only show files that have allowed extensions (from config)
-			if hasAllowedExtension(name) {
-				// Capture the file name for the closure to avoid loop variable issues
-				fileName := name
-				fullFilePath := path.Join(dir, fileName)
-				listView.AddItem(fileName+" [gray](File)[-]", "", 0, func() {
-					selectedFile = fullFilePath
+			// Capture the file name for the closure to avoid loop variable issues
+			fileName := name
+			fullFilePath := path.Join(dir, fileName)
+			listView.AddItem(fileName+" [gray](File)[-]", "", 0, func() {
+				selectedFile = fullFilePath
+				statusView.SetText("Selected: " + selectedFile)
+				// Check if the file is an image
+				if isImageFile(fileName) {
+					// For image files, offer to attach to the next LLM message
+					statusView.SetText("Selected image: " + selectedFile)
+				} else {
+					// For non-image files, display as before
 					statusView.SetText("Selected: " + selectedFile)
-					// Check if the file is an image
-					if isImageFile(fileName) {
-						// For image files, offer to attach to the next LLM message
-						statusView.SetText("Selected image: " + selectedFile)
-					} else {
-						// For non-image files, display as before
-						statusView.SetText("Selected: " + selectedFile)
-					}
-				})
-			}
+				}
+			})
 		}
 	}
 	statusView.SetText("Current: " + dir)
diff --git a/tui.go b/tui.go
--- a/tui.go
+++ b/tui.go
@@ -533,8 +533,7 @@ func init() {
 	})
 	textView.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
 		// Handle vim-like navigation in TextView
-		switch event.Key() {
-		case tcell.KeyRune:
+		if event.Key() == tcell.KeyRune {
 			switch event.Rune() {
 			case 'j':
 				// For line down
@@ -672,17 +671,18 @@ func init() {
 			return nil
 		}
 		m := chatBody.Messages[selectedIndex]
-		if roleEditMode {
+		switch {
+		case roleEditMode:
 			hideIndexBar() // Hide overlay first
 			// Set the current role as the default text in the input field
 			roleEditWindow.SetText(m.Role)
 			pages.AddPage(roleEditPage, roleEditWindow, true, true)
 			roleEditMode = false // Reset the flag
-		} else if editMode {
+		case editMode:
 			hideIndexBar() // Hide overlay first
 			pages.AddPage(editMsgPage, editArea, true, true)
 			editArea.SetText(m.Content, true)
-		} else {
+		default:
 			if err := copyToClipboard(m.Content); err != nil {
 				logger.Error("failed to copy to clipboard", "error", err)
 			}
@@ -760,22 +760,19 @@ func init() {
 		pages.RemovePage(helpPage)
 	})
 	helpView.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
-		switch event.Key() {
-		case tcell.KeyEnter:
+		if event.Key() == tcell.KeyEnter {
 			return event
-		default:
-			if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
-				pages.RemovePage(helpPage)
-				return nil
-			}
+		}
+		if event.Key() == tcell.KeyRune && event.Rune() == 'x' {
+			pages.RemovePage(helpPage)
+			return nil
 		}
 		return nil
 	})
 
 	// imgView = tview.NewImage()
 	imgView.SetInputCapture(func(event *tcell.EventKey) *tcell.EventKey {
-		switch event.Key() {
-		case tcell.KeyEnter:
+		if event.Key() == tcell.KeyEnter {
 			pages.RemovePage(imgPage)
 			return event
 		}
