| -rw-r--r-- | bot.go   | 32 |
| -rw-r--r-- | tools.go | 20 |
2 files changed, 38 insertions, 14 deletions
diff --git a/bot.go b/bot.go
--- a/bot.go
+++ b/bot.go
@@ -11,6 +11,7 @@ import (
 	"gf-lt/models"
 	"gf-lt/rag"
 	"gf-lt/storage"
+	"html"
 	"io"
 	"log/slog"
 	"net"
@@ -473,10 +474,23 @@ out:
 func findCall(msg, toolCall string, tv *tview.TextView) {
 	fc := &models.FuncCall{}
 	if toolCall != "" {
+		// HTML-decode the tool call string to handle encoded characters like &lt;= -> <=
+		decodedToolCall := html.UnescapeString(toolCall)
 		openAIToolMap := make(map[string]string) // respect tool call
-		if err := json.Unmarshal([]byte(toolCall), &openAIToolMap); err != nil {
-			logger.Error("failed to unmarshal openai tool call", "call", toolCall, "error", err)
+		if err := json.Unmarshal([]byte(decodedToolCall), &openAIToolMap); err != nil {
+			logger.Error("failed to unmarshal openai tool call", "call", decodedToolCall, "error", err)
+			// Send error response to LLM so it can retry or handle the error
+			toolResponseMsg := models.RoleMsg{
+				Role: cfg.ToolRole,
+				Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
+				ToolCallID: lastToolCallID, // Use the stored tool call ID
+			}
+			chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
+			// Clear the stored tool call ID after using it
+			lastToolCallID = ""
+			// Trigger the assistant to continue processing with the error message
+			chatRound("", cfg.AssistantRole, tv, false, false)
 			return
 		}
 		lastToolCall.Args = openAIToolMap
@@ -489,8 +503,18 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
 	prefix := "__tool_call__\n"
 	suffix := "\n__tool_call__"
 	jsStr = strings.TrimSuffix(strings.TrimPrefix(jsStr, prefix), suffix)
-	if err := json.Unmarshal([]byte(jsStr), &fc); err != nil {
-		logger.Error("failed to unmarshal tool call", "error", err, "json_string", jsStr)
+	// HTML-decode the JSON string to handle encoded characters like &lt;= -> <=
+	decodedJsStr := html.UnescapeString(jsStr)
+	if err := json.Unmarshal([]byte(decodedJsStr), &fc); err != nil {
+		logger.Error("failed to unmarshal tool call", "error", err, "json_string", decodedJsStr)
+		// Send error response to LLM so it can retry or handle the error
+		toolResponseMsg := models.RoleMsg{
+			Role: cfg.ToolRole,
+			Content: fmt.Sprintf("Error processing tool call: %v. Please check the JSON format and try again.", err),
+		}
+		chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
+		// Trigger the assistant to continue processing with the error message
+		chatRound("", cfg.AssistantRole, tv, false, false)
 		return
 	}
 }
diff --git a/tools.go b/tools.go
--- a/tools.go
+++ b/tools.go
@@ -228,10 +228,10 @@ func fileRead(args map[string]string) []byte {
 	return jsonResult
 }
 
-func fileUpdate(args map[string]string) []byte {
+func fileWrite(args map[string]string) []byte {
 	path, ok := args["path"]
 	if !ok || path == "" {
-		msg := "path not provided to file_update tool"
+		msg := "path not provided to file_write tool"
 		logger.Error(msg)
 		return []byte(msg)
 	}
@@ -249,7 +249,7 @@ func fileUpdate(args map[string]string) []byte {
 	switch mode {
 	case "overwrite":
 		if err := writeStringToFile(path, content); err != nil {
-			msg := "failed to update file; error: " + err.Error()
+			msg := "failed to write to file; error: " + err.Error()
 			logger.Error(msg)
 			return []byte(msg)
 		}
@@ -265,7 +265,7 @@ func fileUpdate(args map[string]string) []byte {
 			return []byte(msg)
 		}
 	}
-	msg := "file updated successfully at " + path
+	msg := "file written successfully at " + path
 	return []byte(msg)
 }
@@ -533,7 +533,7 @@ var fnMap = map[string]fnSig{
 	"websearch": websearch,
 	"file_create": fileCreate,
 	"file_read": fileRead,
-	"file_update": fileUpdate,
+	"file_write": fileWrite,
 	"file_delete": fileDelete,
 	"file_move": fileMove,
 	"file_copy": fileCopy,
@@ -661,19 +661,19 @@ var baseTools = []models.Tool{
 			},
 		},
-	// file_update
+	// file_write
 	models.Tool{
 		Type: "function",
 		Function: models.ToolFunc{
-			Name: "file_update",
-			Description: "Update a file with new content. Use when you want to modify an existing file (overwrite or append).",
+			Name: "file_write",
+			Description: "Write content to a file. Use when you want to create or modify a file (overwrite or append).",
 			Parameters: models.ToolFuncParams{
 				Type: "object",
 				Required: []string{"path", "content"},
 				Properties: map[string]models.ToolArgProps{
 					"path": models.ToolArgProps{
 						Type: "string",
-						Description: "path of the file to update",
+						Description: "path of the file to write to",
 					},
 					"content": models.ToolArgProps{
 						Type: "string",
 					},
 					"mode": models.ToolArgProps{
 						Type: "string",
-						Description: "update mode: 'overwrite' to replace entire file content, 'append' to add to the end (defaults to 'overwrite')",
+						Description: "write mode: 'overwrite' to replace entire file content, 'append' to add to the end (defaults to 'overwrite')",
 					},
 				},
 			},
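The decode-then-parse behaviour added in bot.go can be seen in isolation with the short sketch below. It is not part of the patch: the payload string and field names are made up, and it only shows that a tool call whose quotes and operators arrive HTML-escaped fails json.Unmarshal until html.UnescapeString restores them.

package main

import (
	"encoding/json"
	"fmt"
	"html"
)

func main() {
	// Hypothetical tool call whose quotes and "<=" arrived HTML-escaped.
	raw := `{&quot;name&quot;: &quot;shell&quot;, &quot;cmd&quot;: &quot;test 1 &lt;= 2&quot;}`

	var call map[string]string
	if err := json.Unmarshal([]byte(raw), &call); err != nil {
		fmt.Println("raw payload does not parse:", err)
	}

	// html.UnescapeString turns &quot; back into " and &lt;= back into <=,
	// after which the payload unmarshals normally.
	if err := json.Unmarshal([]byte(html.UnescapeString(raw)), &call); err != nil {
		fmt.Println("still broken:", err)
		return
	}
	fmt.Printf("%+v\n", call)
}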

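For the tools.go side, the renamed file_write entry is dispatched through fnMap with a plain map of string arguments. The standalone sketch below assumes the fnSig signature visible in the diff (map[string]string in, []byte out), stubs the real fileWrite, and uses made-up path/content/mode values, just to show the argument names next to the schema above.

package main

import "fmt"

// fnSig mirrors the signature the tool functions in tools.go appear to use;
// this is assumed from the diff, not imported from the repo.
type fnSig func(args map[string]string) []byte

func main() {
	// Stub standing in for the real fileWrite registered under "file_write".
	fnMap := map[string]fnSig{
		"file_write": func(args map[string]string) []byte {
			return []byte("file written successfully at " + args["path"])
		},
	}

	// Argument names follow the file_write schema: path, content, optional mode.
	args := map[string]string{
		"path":    "notes/todo.txt",
		"content": "ship the release\n",
		"mode":    "append", // defaults to "overwrite" when omitted
	}
	fmt.Println(string(fnMap["file_write"](args)))
}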