summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--bot.go82
-rw-r--r--helpfuncs.go11
-rw-r--r--llm.go36
-rw-r--r--main.go2
-rw-r--r--models/models.go44
-rw-r--r--models/openrouter.go15
-rw-r--r--tools.go594
-rw-r--r--tui.go201
8 files changed, 893 insertions, 92 deletions
diff --git a/bot.go b/bot.go
index e8a13e3..66786ca 100644
--- a/bot.go
+++ b/bot.go
@@ -11,6 +11,7 @@ import (
"gf-lt/models"
"gf-lt/rag"
"gf-lt/storage"
+ "html"
"io"
"log/slog"
"net"
@@ -44,6 +45,7 @@ var (
ragger *rag.RAG
chunkParser ChunkParser
lastToolCall *models.FuncCall
+ lastToolCallID string // Store the ID of the most recent tool call
//nolint:unused // TTS_ENABLED conditionally uses this
orator extra.Orator
asr extra.STT
@@ -189,8 +191,7 @@ func sendMsgToLLM(body io.Reader) {
}
} else {
// Log the request body for debugging
- logger.Info("sending request to API", "api", cfg.CurrentAPI, "body", string(bodyBytes))
-
+ logger.Debug("sending request to API", "api", cfg.CurrentAPI, "body", string(bodyBytes))
// Create request with the captured body
req, err = http.NewRequest("POST", cfg.CurrentAPI, bytes.NewReader(bodyBytes))
if err != nil {
@@ -238,6 +239,9 @@ func sendMsgToLLM(body io.Reader) {
logger.Error("error reading response body", "error", err, "line", string(line),
"user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
// if err.Error() != "EOF" {
+ if err := notifyUser("API error", err.Error()); err != nil {
+ logger.Error("failed to notify", "error", err)
+ }
streamDone <- true
break
// }
@@ -267,11 +271,12 @@ func sendMsgToLLM(body io.Reader) {
break
}
// Handle error messages in response content
- if string(line) != "" && strings.Contains(strings.ToLower(string(line)), "error") {
- logger.Error("API error response detected", "line", line, "url", cfg.CurrentAPI)
- streamDone <- true
- break
- }
+ // example needed, since llm could use the word error in the normal msg
+ // if string(line) != "" && strings.Contains(strings.ToLower(string(line)), "error") {
+ // logger.Error("API error response detected", "line", line, "url", cfg.CurrentAPI)
+ // streamDone <- true
+ // break
+ // }
if chunk.Finished {
if chunk.Chunk != "" {
logger.Warn("text inside of finish llmchunk", "chunk", chunk, "counter", counter)
@@ -290,6 +295,8 @@ func sendMsgToLLM(body io.Reader) {
openAIToolChan <- chunk.ToolChunk
if chunk.FuncName != "" {
lastToolCall.Name = chunk.FuncName
+ // Store the tool call ID for the response
+ lastToolCallID = chunk.ToolID
}
interrupt:
if interruptResp { // read bytes, so it would not get into beginning of the next req
@@ -467,10 +474,23 @@ out:
func findCall(msg, toolCall string, tv *tview.TextView) {
fc := &models.FuncCall{}
if toolCall != "" {
+ // HTML-decode the tool call string to handle encoded characters like &lt; -> <
+ decodedToolCall := html.UnescapeString(toolCall)
openAIToolMap := make(map[string]string)
// respect tool call
- if err := json.Unmarshal([]byte(toolCall), &openAIToolMap); err != nil {
- logger.Error("failed to unmarshal openai tool call", "call", toolCall, "error", err)
+ if err := json.Unmarshal([]byte(decodedToolCall), &openAIToolMap); err != nil {
+ logger.Error("failed to unmarshal openai tool call", "call", decodedToolCall, "error", err)
+ // Send error response to LLM so it can retry or handle the error
+ toolResponseMsg := models.RoleMsg{
+ Role: cfg.ToolRole,
+ Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
+ ToolCallID: lastToolCallID, // Use the stored tool call ID
+ }
+ chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
+ // Clear the stored tool call ID after using it
+ lastToolCallID = ""
+ // Trigger the assistant to continue processing with the error message
+ chatRound("", cfg.AssistantRole, tv, false, false)
return
}
lastToolCall.Args = openAIToolMap
@@ -483,8 +503,18 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
prefix := "__tool_call__\n"
suffix := "\n__tool_call__"
jsStr = strings.TrimSuffix(strings.TrimPrefix(jsStr, prefix), suffix)
- if err := json.Unmarshal([]byte(jsStr), &fc); err != nil {
- logger.Error("failed to unmarshal tool call", "error", err, "json_string", jsStr)
+ // HTML-decode the JSON string to handle encoded characters like &lt; -> <
+ decodedJsStr := html.UnescapeString(jsStr)
+ if err := json.Unmarshal([]byte(decodedJsStr), &fc); err != nil {
+ logger.Error("failed to unmarshal tool call", "error", err, "json_string", decodedJsStr)
+ // Send error response to LLM so it can retry or handle the error
+ toolResponseMsg := models.RoleMsg{
+ Role: cfg.ToolRole,
+ Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
+ }
+ chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
+ // Trigger the assistant to continue processing with the error message
+ chatRound("", cfg.AssistantRole, tv, false, false)
return
}
}
@@ -492,14 +522,38 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
f, ok := fnMap[fc.Name]
if !ok {
m := fc.Name + " is not implemented"
- chatRound(m, cfg.ToolRole, tv, false, false)
+ // Create tool response message with the proper tool_call_id
+ toolResponseMsg := models.RoleMsg{
+ Role: cfg.ToolRole,
+ Content: m,
+ ToolCallID: lastToolCallID, // Use the stored tool call ID
+ }
+ chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
+ // Clear the stored tool call ID after using it
+ lastToolCallID = ""
+
+ // Trigger the assistant to continue processing with the new tool response
+ // by calling chatRound with empty content to continue the assistant's response
+ chatRound("", cfg.AssistantRole, tv, false, false)
return
}
resp := f(fc.Args)
- toolMsg := fmt.Sprintf("tool response: %+v", string(resp))
+ toolMsg := string(resp) // Remove the "tool response: " prefix and %+v formatting
+ logger.Info("llm used tool call", "tool_resp", toolMsg, "tool_attrs", fc)
fmt.Fprintf(tv, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
"\n", len(chatBody.Messages), cfg.ToolRole, toolMsg)
- chatRound(toolMsg, cfg.ToolRole, tv, false, false)
+ // Create tool response message with the proper tool_call_id
+ toolResponseMsg := models.RoleMsg{
+ Role: cfg.ToolRole,
+ Content: toolMsg,
+ ToolCallID: lastToolCallID, // Use the stored tool call ID
+ }
+ chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
+ // Clear the stored tool call ID after using it
+ lastToolCallID = ""
+ // Trigger the assistant to continue processing with the new tool response
+ // by calling chatRound with empty content to continue the assistant's response
+ chatRound("", cfg.AssistantRole, tv, false, false)
}
func chatToTextSlice(showSys bool) []string {
diff --git a/helpfuncs.go b/helpfuncs.go
index f238cd4..d73befe 100644
--- a/helpfuncs.go
+++ b/helpfuncs.go
@@ -209,8 +209,17 @@ func makeStatusLine() string {
} else {
imageInfo = ""
}
+
+ // Add shell mode status to status line
+ var shellModeInfo string
+ if shellMode {
+ shellModeInfo = " | [green:-:b]SHELL MODE[-:-:-]"
+ } else {
+ shellModeInfo = ""
+ }
+
statusLine := fmt.Sprintf(indexLineCompletion, botRespMode, cfg.AssistantRole, activeChatName,
cfg.ToolUse, chatBody.Model, cfg.SkipLLMResp, cfg.CurrentAPI, cfg.ThinkUse, logLevel.Level(),
isRecording, persona, botPersona, injectRole)
- return statusLine + imageInfo
+ return statusLine + imageInfo + shellModeInfo
}
diff --git a/llm.go b/llm.go
index 38b484f..e7245ce 100644
--- a/llm.go
+++ b/llm.go
@@ -76,7 +76,6 @@ type OpenRouterChat struct {
Model string
}
-
func (lcp LlamaCPPeer) GetToken() string {
return ""
}
@@ -161,11 +160,14 @@ func (op OpenAIer) ParseChunk(data []byte) (*models.TextChunk, error) {
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content,
}
if len(llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls) > 0 {
- resp.ToolChunk = llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0].Function.Arguments
- fname := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0].Function.Name
+ toolCall := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0]
+ resp.ToolChunk = toolCall.Function.Arguments
+ fname := toolCall.Function.Name
if fname != "" {
resp.FuncName = fname
}
+ // Capture the tool call ID if available
+ resp.ToolID = toolCall.ID
}
if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" {
if resp.Chunk != "" {
@@ -469,6 +471,22 @@ func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) {
resp := &models.TextChunk{
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content,
}
+
+ // Handle tool calls similar to OpenAIer
+ if len(llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls) > 0 {
+ toolCall := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0]
+ resp.ToolChunk = toolCall.Function.Arguments
+ fname := toolCall.Function.Name
+ if fname != "" {
+ resp.FuncName = fname
+ }
+ // Capture the tool call ID if available
+ resp.ToolID = toolCall.ID
+ }
+ if resp.ToolChunk != "" {
+ resp.ToolResp = true
+ }
+
if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" {
if resp.Chunk != "" {
logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
@@ -484,16 +502,9 @@ func (or OpenRouterChat) GetToken() string {
func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
logger.Debug("formmsg open router completion", "link", cfg.CurrentAPI)
-
// Capture the image attachment path at the beginning to avoid race conditions
// with API rotation that might clear the global variable
localImageAttachmentPath := imageAttachmentPath
-
- if cfg.ToolUse && !resume {
- // prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
- // add to chat body
- chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
- }
if msg != "" { // otherwise let the bot continue
var newMsg models.RoleMsg
// Check if we have an image to add to this message
@@ -536,7 +547,6 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
Model: chatBody.Model,
Stream: chatBody.Stream,
}
-
for i, msg := range chatBody.Messages {
bodyCopy.Messages[i] = msg
// Standardize role if it's a user role
@@ -544,8 +554,10 @@ func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
bodyCopy.Messages[i].Role = "user"
}
}
-
orBody := models.NewOpenRouterChatReq(*bodyCopy, defaultLCPProps)
+ if cfg.ToolUse && !resume && role != cfg.ToolRole {
+ orBody.Tools = baseTools // set tools to use
+ }
data, err := json.Marshal(orBody)
if err != nil {
logger.Error("failed to form a msg", "error", err)
diff --git a/main.go b/main.go
index b35fdf2..baa783d 100644
--- a/main.go
+++ b/main.go
@@ -14,6 +14,8 @@ var (
injectRole = true
selectedIndex = int(-1)
currentAPIIndex = 0 // Index to track current API in ApiLinks slice
+ currentORModelIndex = 0 // Index to track current OpenRouter model in ORFreeModels slice
+ shellMode = false
// indexLine = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | card's char: [orange:-:b]%s[-:-:-] (ctrl+s) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | ThinkUse: [orange:-:b]%v[-:-:-] (ctrl+p) | Log Level: [orange:-:b]%v[-:-:-] (ctrl+p) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q)"
indexLineCompletion = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | card's char: [orange:-:b]%s[-:-:-] (ctrl+s) | chat: [orange:-:b]%s[-:-:-] (F1) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l) | skip LLM resp: [orange:-:b]%v[-:-:-] (F10)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | Insert <think>: [orange:-:b]%v[-:-:-] (ctrl+p) | Log Level: [orange:-:b]%v[-:-:-] (ctrl+p) | Recording: [orange:-:b]%v[-:-:-] (ctrl+r) | Writing as: [orange:-:b]%s[-:-:-] (ctrl+q) | Bot will write as [orange:-:b]%s[-:-:-] (ctrl+x) | role_inject [orange:-:b]%v[-:-:-]"
focusSwitcher = map[tview.Primitive]tview.Primitive{}
diff --git a/models/models.go b/models/models.go
index 58f0291..798ea35 100644
--- a/models/models.go
+++ b/models/models.go
@@ -9,6 +9,7 @@ import (
)
type FuncCall struct {
+ ID string `json:"id,omitempty"`
Name string `json:"name"`
Args map[string]string `json:"args"`
}
@@ -39,6 +40,7 @@ type ToolDeltaFunc struct {
}
type ToolDeltaResp struct {
+ ID string `json:"id,omitempty"`
Index int `json:"index"`
Function ToolDeltaFunc `json:"function"`
}
@@ -70,6 +72,7 @@ type TextChunk struct {
Finished bool
ToolResp bool
FuncName string
+ ToolID string
}
type TextContentPart struct {
@@ -86,10 +89,11 @@ type ImageContentPart struct {
// RoleMsg represents a message with content that can be either a simple string or structured content parts
type RoleMsg struct {
- Role string `json:"role"`
- Content string `json:"-"`
- ContentParts []interface{} `json:"-"`
- hasContentParts bool // Flag to indicate which content type to marshal
+ Role string `json:"role"`
+ Content string `json:"-"`
+ ContentParts []interface{} `json:"-"`
+ ToolCallID string `json:"tool_call_id,omitempty"` // For tool response messages
+ hasContentParts bool // Flag to indicate which content type to marshal
}
// MarshalJSON implements custom JSON marshaling for RoleMsg
@@ -97,21 +101,25 @@ func (m RoleMsg) MarshalJSON() ([]byte, error) {
if m.hasContentParts {
// Use structured content format
aux := struct {
- Role string `json:"role"`
- Content []interface{} `json:"content"`
+ Role string `json:"role"`
+ Content []interface{} `json:"content"`
+ ToolCallID string `json:"tool_call_id,omitempty"`
}{
- Role: m.Role,
- Content: m.ContentParts,
+ Role: m.Role,
+ Content: m.ContentParts,
+ ToolCallID: m.ToolCallID,
}
return json.Marshal(aux)
} else {
// Use simple content format
aux := struct {
- Role string `json:"role"`
- Content string `json:"content"`
+ Role string `json:"role"`
+ Content string `json:"content"`
+ ToolCallID string `json:"tool_call_id,omitempty"`
}{
- Role: m.Role,
- Content: m.Content,
+ Role: m.Role,
+ Content: m.Content,
+ ToolCallID: m.ToolCallID,
}
return json.Marshal(aux)
}
@@ -121,26 +129,30 @@ func (m RoleMsg) MarshalJSON() ([]byte, error) {
func (m *RoleMsg) UnmarshalJSON(data []byte) error {
// First, try to unmarshal as structured content format
var structured struct {
- Role string `json:"role"`
- Content []interface{} `json:"content"`
+ Role string `json:"role"`
+ Content []interface{} `json:"content"`
+ ToolCallID string `json:"tool_call_id,omitempty"`
}
if err := json.Unmarshal(data, &structured); err == nil && len(structured.Content) > 0 {
m.Role = structured.Role
m.ContentParts = structured.Content
+ m.ToolCallID = structured.ToolCallID
m.hasContentParts = true
return nil
}
// Otherwise, unmarshal as simple content format
var simple struct {
- Role string `json:"role"`
- Content string `json:"content"`
+ Role string `json:"role"`
+ Content string `json:"content"`
+ ToolCallID string `json:"tool_call_id,omitempty"`
}
if err := json.Unmarshal(data, &simple); err != nil {
return err
}
m.Role = simple.Role
m.Content = simple.Content
+ m.ToolCallID = simple.ToolCallID
m.hasContentParts = false
return nil
}
diff --git a/models/openrouter.go b/models/openrouter.go
index 933598e..50f26b6 100644
--- a/models/openrouter.go
+++ b/models/openrouter.go
@@ -31,6 +31,7 @@ type OpenRouterChatReq struct {
Temperature float32 `json:"temperature"`
MinP float32 `json:"min_p"`
NPredict int32 `json:"max_tokens"`
+ Tools []Tool `json:"tools"`
}
func NewOpenRouterChatReq(cb ChatBody, props map[string]float32) OpenRouterChatReq {
@@ -56,10 +57,11 @@ type OpenRouterChatRespNonStream struct {
NativeFinishReason string `json:"native_finish_reason"`
Index int `json:"index"`
Message struct {
- Role string `json:"role"`
- Content string `json:"content"`
- Refusal any `json:"refusal"`
- Reasoning any `json:"reasoning"`
+ Role string `json:"role"`
+ Content string `json:"content"`
+ Refusal any `json:"refusal"`
+ Reasoning any `json:"reasoning"`
+ ToolCalls []ToolDeltaResp `json:"tool_calls"`
} `json:"message"`
} `json:"choices"`
Usage struct {
@@ -78,8 +80,9 @@ type OpenRouterChatResp struct {
Choices []struct {
Index int `json:"index"`
Delta struct {
- Role string `json:"role"`
- Content string `json:"content"`
+ Role string `json:"role"`
+ Content string `json:"content"`
+ ToolCalls []ToolDeltaResp `json:"tool_calls"`
} `json:"delta"`
FinishReason string `json:"finish_reason"`
NativeFinishReason string `json:"native_finish_reason"`
diff --git a/tools.go b/tools.go
index 72b065c..fda8750 100644
--- a/tools.go
+++ b/tools.go
@@ -6,6 +6,9 @@ import (
"fmt"
"gf-lt/extra"
"gf-lt/models"
+ "io"
+ "os"
+ "os/exec"
"regexp"
"strconv"
"strings"
@@ -36,13 +39,58 @@ Your current tools:
},
{
"name":"memorise",
-"args": ["topic", "info"],
-"when_to_use": "when asked to memorise something"
+"args": ["topic", "data"],
+"when_to_use": "when asked to memorise information under a topic"
},
{
"name":"recall_topics",
"args": [],
"when_to_use": "to see what topics are saved in memory"
+},
+{
+"name":"websearch",
+"args": ["query", "limit"],
+"when_to_use": "when asked to search the web for information; limit is optional (default 3)"
+},
+{
+"name":"file_create",
+"args": ["path", "content"],
+"when_to_use": "when asked to create a new file with optional content"
+},
+{
+"name":"file_read",
+"args": ["path"],
+"when_to_use": "when asked to read the content of a file"
+},
+{
+"name":"file_write",
+"args": ["path", "content", "mode"],
+"when_to_use": "when asked to write content to a file; mode is optional (overwrite or append, default: overwrite)"
+},
+{
+"name":"file_delete",
+"args": ["path"],
+"when_to_use": "when asked to delete a file"
+},
+{
+"name":"file_move",
+"args": ["src", "dst"],
+"when_to_use": "when asked to move a file from source to destination"
+},
+{
+"name":"file_copy",
+"args": ["src", "dst"],
+"when_to_use": "when asked to copy a file from source to destination"
+},
+{
+"name":"file_list",
+"args": ["path"],
+"when_to_use": "when asked to list files in a directory; path is optional (default: current directory)"
+},
+{
+"name":"execute_command",
+"args": ["command", "args"],
+"when_to_use": "when asked to execute a system command; args is optional"
}
]
</tools>
@@ -171,13 +219,371 @@ func recallTopics(args map[string]string) []byte {
return []byte(joinedS)
}
+// File Manipulation Tools
+
+func fileCreate(args map[string]string) []byte {
+ path, ok := args["path"]
+ if !ok || path == "" {
+ msg := "path not provided to file_create tool"
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ content, ok := args["content"]
+ if !ok {
+ content = ""
+ }
+
+ if err := writeStringToFile(path, content); err != nil {
+ msg := "failed to create file; error: " + err.Error()
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ msg := "file created successfully at " + path
+ return []byte(msg)
+}
+
+func fileRead(args map[string]string) []byte {
+ path, ok := args["path"]
+ if !ok || path == "" {
+ msg := "path not provided to file_read tool"
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ content, err := readStringFromFile(path)
+ if err != nil {
+ msg := "failed to read file; error: " + err.Error()
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ result := map[string]string{
+ "content": content,
+ "path": path,
+ }
+ jsonResult, err := json.Marshal(result)
+ if err != nil {
+ msg := "failed to marshal result; error: " + err.Error()
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ return jsonResult
+}
+
+func fileWrite(args map[string]string) []byte {
+ path, ok := args["path"]
+ if !ok || path == "" {
+ msg := "path not provided to file_write tool"
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ content, ok := args["content"]
+ if !ok {
+ content = ""
+ }
+
+ mode, ok := args["mode"]
+ if !ok || mode == "" {
+ mode = "overwrite"
+ }
+
+ switch mode {
+ case "overwrite":
+ if err := writeStringToFile(path, content); err != nil {
+ msg := "failed to write to file; error: " + err.Error()
+ logger.Error(msg)
+ return []byte(msg)
+ }
+ case "append":
+ if err := appendStringToFile(path, content); err != nil {
+ msg := "failed to append to file; error: " + err.Error()
+ logger.Error(msg)
+ return []byte(msg)
+ }
+ default:
+ msg := "invalid mode; use 'overwrite' or 'append'"
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ msg := "file written successfully at " + path
+ return []byte(msg)
+}
+
+func fileDelete(args map[string]string) []byte {
+ path, ok := args["path"]
+ if !ok || path == "" {
+ msg := "path not provided to file_delete tool"
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ if err := removeFile(path); err != nil {
+ msg := "failed to delete file; error: " + err.Error()
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ msg := "file deleted successfully at " + path
+ return []byte(msg)
+}
+
+func fileMove(args map[string]string) []byte {
+ src, ok := args["src"]
+ if !ok || src == "" {
+ msg := "source path not provided to file_move tool"
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ dst, ok := args["dst"]
+ if !ok || dst == "" {
+ msg := "destination path not provided to file_move tool"
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ if err := moveFile(src, dst); err != nil {
+ msg := "failed to move file; error: " + err.Error()
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ msg := fmt.Sprintf("file moved successfully from %s to %s", src, dst)
+ return []byte(msg)
+}
+
+func fileCopy(args map[string]string) []byte {
+ src, ok := args["src"]
+ if !ok || src == "" {
+ msg := "source path not provided to file_copy tool"
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ dst, ok := args["dst"]
+ if !ok || dst == "" {
+ msg := "destination path not provided to file_copy tool"
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ if err := copyFile(src, dst); err != nil {
+ msg := "failed to copy file; error: " + err.Error()
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ msg := fmt.Sprintf("file copied successfully from %s to %s", src, dst)
+ return []byte(msg)
+}
+
+func fileList(args map[string]string) []byte {
+ path, ok := args["path"]
+ if !ok || path == "" {
+ path = "." // default to current directory
+ }
+
+ files, err := listDirectory(path)
+ if err != nil {
+ msg := "failed to list directory; error: " + err.Error()
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ result := map[string]interface{}{
+ "directory": path,
+ "files": files,
+ }
+ jsonResult, err := json.Marshal(result)
+ if err != nil {
+ msg := "failed to marshal result; error: " + err.Error()
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ return jsonResult
+}
+
+// Helper functions for file operations
+
+func readStringFromFile(filename string) (string, error) {
+ data, err := os.ReadFile(filename)
+ if err != nil {
+ return "", err
+ }
+ return string(data), nil
+}
+
+func writeStringToFile(filename string, data string) error {
+ return os.WriteFile(filename, []byte(data), 0644)
+}
+
+func appendStringToFile(filename string, data string) error {
+ file, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
+ if err != nil {
+ return err
+ }
+ defer file.Close()
+
+ _, err = file.WriteString(data)
+ return err
+}
+
+func removeFile(filename string) error {
+ return os.Remove(filename)
+}
+
+func moveFile(src, dst string) error {
+ // First try with os.Rename (works within same filesystem)
+ if err := os.Rename(src, dst); err == nil {
+ return nil
+ }
+ // If that fails (e.g., cross-filesystem), copy and delete
+ return copyAndRemove(src, dst)
+}
+
+func copyFile(src, dst string) error {
+ srcFile, err := os.Open(src)
+ if err != nil {
+ return err
+ }
+ defer srcFile.Close()
+
+ dstFile, err := os.Create(dst)
+ if err != nil {
+ return err
+ }
+ defer dstFile.Close()
+
+ _, err = io.Copy(dstFile, srcFile)
+ return err
+}
+
+func copyAndRemove(src, dst string) error {
+ // Copy the file
+ if err := copyFile(src, dst); err != nil {
+ return err
+ }
+ // Remove the source file
+ return os.Remove(src)
+}
+
+func listDirectory(path string) ([]string, error) {
+ entries, err := os.ReadDir(path)
+ if err != nil {
+ return nil, err
+ }
+
+ var files []string
+ for _, entry := range entries {
+ if entry.IsDir() {
+ files = append(files, entry.Name()+"/") // Add "/" to indicate directory
+ } else {
+ files = append(files, entry.Name())
+ }
+ }
+
+ return files, nil
+}
+
+// Command Execution Tool
+
+func executeCommand(args map[string]string) []byte {
+ command, ok := args["command"]
+ if !ok || command == "" {
+ msg := "command not provided to execute_command tool"
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ if !isCommandAllowed(command) {
+ msg := fmt.Sprintf("command '%s' is not allowed", command)
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ // Get arguments - handle both single arg and multiple args
+ var cmdArgs []string
+ if args["args"] != "" {
+ // If args is provided as a single string, split by spaces
+ cmdArgs = strings.Fields(args["args"])
+ } else {
+ // If individual args are provided, collect them
+ argNum := 1
+ for {
+ argKey := fmt.Sprintf("arg%d", argNum)
+ if argValue, exists := args[argKey]; exists && argValue != "" {
+ cmdArgs = append(cmdArgs, argValue)
+ } else {
+ break
+ }
+ argNum++
+ }
+ }
+
+ // Execute with timeout for safety
+ ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ defer cancel()
+ cmd := exec.CommandContext(ctx, command, cmdArgs...)
+
+ output, err := cmd.CombinedOutput()
+ if err != nil {
+ msg := fmt.Sprintf("command '%s' failed; error: %v; output: %s", command, err, string(output))
+ logger.Error(msg)
+ return []byte(msg)
+ }
+
+ return output
+}
+
+// Helper functions for command execution
+
+func isCommandAllowed(command string) bool {
+ allowedCommands := map[string]bool{
+ "grep": true,
+ "sed": true,
+ "awk": true,
+ "find": true,
+ "cat": true,
+ "head": true,
+ "tail": true,
+ "sort": true,
+ "uniq": true,
+ "wc": true,
+ "ls": true,
+ "echo": true,
+ "cut": true,
+ "tr": true,
+ "cp": true,
+ "mv": true,
+ "rm": true,
+ "mkdir": true,
+ "rmdir": true,
+ "pwd": true,
+ }
+ return allowedCommands[command]
+}
+
type fnSig func(map[string]string) []byte
var fnMap = map[string]fnSig{
- "recall": recall,
- "recall_topics": recallTopics,
- "memorise": memorise,
- "websearch": websearch,
+ "recall": recall,
+ "recall_topics": recallTopics,
+ "memorise": memorise,
+ "websearch": websearch,
+ "file_create": fileCreate,
+ "file_read": fileRead,
+ "file_write": fileWrite,
+ "file_delete": fileDelete,
+ "file_move": fileMove,
+ "file_copy": fileCopy,
+ "file_list": fileList,
+ "execute_command": executeCommand,
}
// openai style def
@@ -257,4 +663,180 @@ var baseTools = []models.Tool{
},
},
},
+
+ // file_create
+ models.Tool{
+ Type: "function",
+ Function: models.ToolFunc{
+ Name: "file_create",
+ Description: "Create a new file with specified content. Use when you need to create a new file.",
+ Parameters: models.ToolFuncParams{
+ Type: "object",
+ Required: []string{"path"},
+ Properties: map[string]models.ToolArgProps{
+ "path": models.ToolArgProps{
+ Type: "string",
+ Description: "path where the file should be created",
+ },
+ "content": models.ToolArgProps{
+ Type: "string",
+ Description: "content to write to the file (optional, defaults to empty string)",
+ },
+ },
+ },
+ },
+ },
+
+ // file_read
+ models.Tool{
+ Type: "function",
+ Function: models.ToolFunc{
+ Name: "file_read",
+ Description: "Read the content of a file. Use when you need to see the content of a file.",
+ Parameters: models.ToolFuncParams{
+ Type: "object",
+ Required: []string{"path"},
+ Properties: map[string]models.ToolArgProps{
+ "path": models.ToolArgProps{
+ Type: "string",
+ Description: "path of the file to read",
+ },
+ },
+ },
+ },
+ },
+
+ // file_write
+ models.Tool{
+ Type: "function",
+ Function: models.ToolFunc{
+ Name: "file_write",
+ Description: "Write content to a file. Use when you want to create or modify a file (overwrite or append).",
+ Parameters: models.ToolFuncParams{
+ Type: "object",
+ Required: []string{"path", "content"},
+ Properties: map[string]models.ToolArgProps{
+ "path": models.ToolArgProps{
+ Type: "string",
+ Description: "path of the file to write to",
+ },
+ "content": models.ToolArgProps{
+ Type: "string",
+ Description: "content to write to the file",
+ },
+ "mode": models.ToolArgProps{
+ Type: "string",
+ Description: "write mode: 'overwrite' to replace entire file content, 'append' to add to the end (defaults to 'overwrite')",
+ },
+ },
+ },
+ },
+ },
+
+ // file_delete
+ models.Tool{
+ Type: "function",
+ Function: models.ToolFunc{
+ Name: "file_delete",
+ Description: "Delete a file. Use when you need to remove a file.",
+ Parameters: models.ToolFuncParams{
+ Type: "object",
+ Required: []string{"path"},
+ Properties: map[string]models.ToolArgProps{
+ "path": models.ToolArgProps{
+ Type: "string",
+ Description: "path of the file to delete",
+ },
+ },
+ },
+ },
+ },
+
+ // file_move
+ models.Tool{
+ Type: "function",
+ Function: models.ToolFunc{
+ Name: "file_move",
+ Description: "Move a file from one location to another. Use when you need to relocate a file.",
+ Parameters: models.ToolFuncParams{
+ Type: "object",
+ Required: []string{"src", "dst"},
+ Properties: map[string]models.ToolArgProps{
+ "src": models.ToolArgProps{
+ Type: "string",
+ Description: "source path of the file to move",
+ },
+ "dst": models.ToolArgProps{
+ Type: "string",
+ Description: "destination path where the file should be moved",
+ },
+ },
+ },
+ },
+ },
+
+ // file_copy
+ models.Tool{
+ Type: "function",
+ Function: models.ToolFunc{
+ Name: "file_copy",
+ Description: "Copy a file from one location to another. Use when you need to duplicate a file.",
+ Parameters: models.ToolFuncParams{
+ Type: "object",
+ Required: []string{"src", "dst"},
+ Properties: map[string]models.ToolArgProps{
+ "src": models.ToolArgProps{
+ Type: "string",
+ Description: "source path of the file to copy",
+ },
+ "dst": models.ToolArgProps{
+ Type: "string",
+ Description: "destination path where the file should be copied",
+ },
+ },
+ },
+ },
+ },
+
+ // file_list
+ models.Tool{
+ Type: "function",
+ Function: models.ToolFunc{
+ Name: "file_list",
+ Description: "List files and directories in a directory. Use when you need to see what files are in a directory.",
+ Parameters: models.ToolFuncParams{
+ Type: "object",
+ Required: []string{},
+ Properties: map[string]models.ToolArgProps{
+ "path": models.ToolArgProps{
+ Type: "string",
+ Description: "path of the directory to list (optional, defaults to current directory)",
+ },
+ },
+ },
+ },
+ },
+
+ // execute_command
+ models.Tool{
+ Type: "function",
+ Function: models.ToolFunc{
+ Name: "execute_command",
+ Description: "Execute a shell command safely. Use when you need to run system commands like grep sed awk find cat head tail sort uniq wc ls echo cut tr cp mv rm mkdir rmdir pwd",
+ Parameters: models.ToolFuncParams{
+ Type: "object",
+ Required: []string{"command"},
+ Properties: map[string]models.ToolArgProps{
+ "command": models.ToolArgProps{
+ Type: "string",
+ Description: "command to execute (only commands from whitelist are allowed: grep sed awk find cat head tail sort uniq wc ls echo cut tr cp mv rm mkdir rmdir pwd)",
+ },
+ "args": models.ToolArgProps{
+ Type: "string",
+ Description: "command arguments as a single string (e.g., '-la {path}')",
+ },
+ },
+ },
+ },
+ },
}
diff --git a/tui.go b/tui.go
index 8667314..3c53359 100644
--- a/tui.go
+++ b/tui.go
@@ -8,6 +8,7 @@ import (
_ "image/jpeg"
_ "image/png"
"os"
+ "os/exec"
"path"
"slices"
"strconv"
@@ -71,7 +72,7 @@ var (
[yellow]Ctrl+v[white]: switch between /completion and /chat api (if provided in config)
[yellow]Ctrl+r[white]: start/stop recording from your microphone (needs stt server)
[yellow]Ctrl+t[white]: remove thinking (<think>) and tool messages from context (delete from chat)
-[yellow]Ctrl+l[white]: update connected model name (llamacpp)
+[yellow]Ctrl+l[white]: rotate through free OpenRouter models (if openrouter api) or update connected model name (llamacpp)
[yellow]Ctrl+k[white]: switch tool use (recommend tool use to llm after user msg)
[yellow]Ctrl+j[white]: if chat agent is char.png will show the image; then any key to return
[yellow]Ctrl+a[white]: interrupt tts (needs tts server)
@@ -80,7 +81,13 @@ var (
[yellow]Ctrl+q[white]: cycle through mentioned chars in chat, to pick persona to send next msg as
[yellow]Ctrl+x[white]: cycle through mentioned chars in chat, to pick persona to send next msg as (for llm)
[yellow]Alt+5[white]: toggle fullscreen for input/chat window
+[yellow]Alt+1[white]: toggle shell mode (execute commands locally)
+=== scrolling chat window (some keys similar to vim) ===
+[yellow]arrows up/down and j/k[white]: scroll up and down
+[yellow]gg/G[white]: jump to the beginning / end of the chat
+
+=== status line ===
%s
Press Enter to go back
@@ -204,6 +211,102 @@ func makePropsForm(props map[string]float32) *tview.Form {
return form
}
+func toggleShellMode() {
+ shellMode = !shellMode
+ if shellMode {
+ // Update input placeholder to indicate shell mode
+ textArea.SetPlaceholder("SHELL MODE: Enter command and press <Esc> to execute")
+ } else {
+ // Reset to normal mode
+ textArea.SetPlaceholder("input is multiline; press <Enter> to start the next line;\npress <Esc> to send the message. Alt+1 to exit shell mode")
+ }
+ updateStatusLine()
+}
+
+func executeCommandAndDisplay(cmdText string) {
+ // Parse the command (split by spaces, but handle quoted arguments)
+ cmdParts := parseCommand(cmdText)
+ if len(cmdParts) == 0 {
+ fmt.Fprintf(textView, "\n[red]Error: No command provided[-:-:-]\n")
+ textView.ScrollToEnd()
+ colorText()
+ return
+ }
+
+ command := cmdParts[0]
+ args := []string{}
+ if len(cmdParts) > 1 {
+ args = cmdParts[1:]
+ }
+
+ // Create the command execution
+ cmd := exec.Command(command, args...)
+
+ // Execute the command and get output
+ output, err := cmd.CombinedOutput()
+
+ // Add the command being executed to the chat
+ fmt.Fprintf(textView, "\n[yellow]$ %s[-:-:-]\n", cmdText)
+
+ if err != nil {
+ // Include both output and error
+ fmt.Fprintf(textView, "[red]Error: %s[-:-:-]\n", err.Error())
+ if len(output) > 0 {
+ fmt.Fprintf(textView, "[red]%s[-:-:-]\n", string(output))
+ }
+ } else {
+ // Only output if successful
+ if len(output) > 0 {
+ fmt.Fprintf(textView, "[green]%s[-:-:-]\n", string(output))
+ } else {
+ fmt.Fprintf(textView, "[green]Command executed successfully (no output)[-:-:-]\n")
+ }
+ }
+
+ // Scroll to end and update colors
+ textView.ScrollToEnd()
+ colorText()
+}
+
// parseCommand splits a raw command line into tokens, honoring single-
// and double-quoted substrings so quoted arguments may contain spaces.
// The quote characters themselves are stripped from the output; a quote
// character of the other kind inside a quoted region is kept literally.
//
// Unlike a real shell it does not process backslash escapes, and an
// unterminated quote simply consumes the rest of the string.
//
// Fix over the previous version: an explicitly quoted empty argument
// ("" or '') is now preserved instead of being silently dropped
// (e.g. `grep "" file` used to lose its pattern argument).
func parseCommand(cmd string) []string {
	var (
		args      []string
		current   strings.Builder
		inQuotes  bool
		quoteChar rune
		hasToken  bool // true once the current token has content OR was quoted
	)
	// flush appends the accumulated token, keeping empty-but-quoted tokens.
	flush := func() {
		if hasToken {
			args = append(args, current.String())
			current.Reset()
			hasToken = false
		}
	}
	for _, r := range cmd {
		switch r {
		case '"', '\'':
			if inQuotes {
				if r == quoteChar {
					inQuotes = false
				} else {
					// The other quote kind is literal inside quotes.
					current.WriteRune(r)
				}
			} else {
				inQuotes = true
				quoteChar = r
				hasToken = true // a quoted token exists even if it stays empty
			}
		case ' ', '\t':
			if inQuotes {
				current.WriteRune(r)
			} else {
				flush()
			}
		default:
			current.WriteRune(r)
			hasToken = true
		}
	}
	flush()
	return args
}
+
func init() {
tview.Styles = colorschemes["default"]
app = tview.NewApplication()
@@ -575,10 +678,21 @@ func init() {
return nil
}
if event.Key() == tcell.KeyCtrlL {
- go func() {
- fetchLCPModelName() // blocks
+ // Check if the current API is an OpenRouter API
+ if strings.Contains(cfg.CurrentAPI, "openrouter.ai/api/v1/") {
+ // Rotate through OpenRouter free models
+ if len(ORFreeModels) > 0 {
+ currentORModelIndex = (currentORModelIndex + 1) % len(ORFreeModels)
+ chatBody.Model = ORFreeModels[currentORModelIndex]
+ }
updateStatusLine()
- }()
+ } else {
+ // For non-OpenRouter APIs, use the old logic
+ go func() {
+ fetchLCPModelName() // blocks
+ updateStatusLine()
+ }()
+ }
return nil
}
if event.Key() == tcell.KeyCtrlT {
@@ -812,46 +926,59 @@ func init() {
pages.AddPage(RAGLoadedPage, chatLoadedRAGTable, true, true)
return nil
}
+ if event.Key() == tcell.KeyRune && event.Modifiers() == tcell.ModAlt && event.Rune() == '1' {
+ // Toggle shell mode: when enabled, commands are executed locally instead of sent to LLM
+ toggleShellMode()
+ return nil
+ }
// cannot send msg in editMode or botRespMode
if event.Key() == tcell.KeyEscape && !editMode && !botRespMode {
- // read all text into buffer
msgText := textArea.GetText()
- nl := "\n"
- prevText := textView.GetText(true)
- persona := cfg.UserRole
- // strings.LastIndex()
- // newline is not needed is prev msg ends with one
- if strings.HasSuffix(prevText, nl) {
- nl = ""
- }
- if msgText != "" {
- // as what char user sends msg?
- if cfg.WriteNextMsgAs != "" {
- persona = cfg.WriteNextMsgAs
+
+ if shellMode && msgText != "" {
+ // In shell mode, execute command instead of sending to LLM
+ executeCommandAndDisplay(msgText)
+ textArea.SetText("", true) // Clear the input area
+ return nil
+ } else if !shellMode {
+ // Normal mode - send to LLM
+ nl := "\n"
+ prevText := textView.GetText(true)
+ persona := cfg.UserRole
+ // strings.LastIndex()
+			// newline is not needed if prev msg ends with one
+ if strings.HasSuffix(prevText, nl) {
+ nl = ""
}
- // check if plain text
- if !injectRole {
- matches := roleRE.FindStringSubmatch(msgText)
- if len(matches) > 1 {
- persona = matches[1]
- msgText = strings.TrimLeft(msgText[len(matches[0]):], " ")
+ if msgText != "" {
+ // as what char user sends msg?
+ if cfg.WriteNextMsgAs != "" {
+ persona = cfg.WriteNextMsgAs
}
+ // check if plain text
+ if !injectRole {
+ matches := roleRE.FindStringSubmatch(msgText)
+ if len(matches) > 1 {
+ persona = matches[1]
+ msgText = strings.TrimLeft(msgText[len(matches[0]):], " ")
+ }
+ }
+ // add user icon before user msg
+ fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
+ nl, len(chatBody.Messages), persona, msgText)
+ textArea.SetText("", true)
+ textView.ScrollToEnd()
+ colorText()
}
- // add user icon before user msg
- fmt.Fprintf(textView, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
- nl, len(chatBody.Messages), persona, msgText)
- textArea.SetText("", true)
- textView.ScrollToEnd()
- colorText()
+ go chatRound(msgText, persona, textView, false, false)
+ // Also clear any image attachment after sending the message
+ go func() {
+ // Wait a short moment for the message to be processed, then clear the image attachment
+ // This allows the image to be sent with the current message if it was attached
+ // But clears it for the next message
+ ClearImageAttachment()
+ }()
}
- go chatRound(msgText, persona, textView, false, false)
- // Also clear any image attachment after sending the message
- go func() {
- // Wait a short moment for the message to be processed, then clear the image attachment
- // This allows the image to be sent with the current message if it was attached
- // But clears it for the next message
- ClearImageAttachment()
- }()
return nil
}
if event.Key() == tcell.KeyPgUp || event.Key() == tcell.KeyPgDn {