author     Grail Finder <wohilas@gmail.com>    2025-11-25 11:40:37 +0300
committer  Grail Finder <wohilas@gmail.com>    2025-11-25 11:40:37 +0300
commit     01da37b3971a6aa1c3c051c34666672b339ae2b6 (patch)
tree       dd5abf293104b8b3affee9195ec27a186ae9d4c5
parent     fc963f86c94f8d96224414e409fda757f3b2d11d (diff)
Fix: openrouter func ctx resp
-rw-r--r--  bot.go            | 35
-rw-r--r--  llm.go            | 14
-rw-r--r--  models/models.go  | 44
3 files changed, 70 insertions(+), 23 deletions(-)
diff --git a/bot.go b/bot.go
index e8a13e3..2962eb7 100644
--- a/bot.go
+++ b/bot.go
@@ -44,6 +44,7 @@ var (
ragger *rag.RAG
chunkParser ChunkParser
lastToolCall *models.FuncCall
+ lastToolCallID string // Store the ID of the most recent tool call
//nolint:unused // TTS_ENABLED conditionally uses this
orator extra.Orator
asr extra.STT
@@ -290,6 +291,8 @@ func sendMsgToLLM(body io.Reader) {
openAIToolChan <- chunk.ToolChunk
if chunk.FuncName != "" {
lastToolCall.Name = chunk.FuncName
+ // Store the tool call ID for the response
+ lastToolCallID = chunk.ToolID
}
interrupt:
if interruptResp { // read bytes, so it would not get into beginning of the next req
@@ -492,14 +495,40 @@ func findCall(msg, toolCall string, tv *tview.TextView) {
f, ok := fnMap[fc.Name]
if !ok {
m := fc.Name + " is not implemented"
- chatRound(m, cfg.ToolRole, tv, false, false)
+ // Create tool response message with the proper tool_call_id
+ toolResponseMsg := models.RoleMsg{
+ Role: cfg.ToolRole,
+ Content: m,
+ ToolCallID: lastToolCallID, // Use the stored tool call ID
+ }
+ chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
+ // Clear the stored tool call ID after using it
+ lastToolCallID = ""
+
+ // Trigger the assistant to continue processing with the new tool response
+ // by calling chatRound with empty content to continue the assistant's response
+ chatRound("", cfg.AssistantRole, tv, false, false)
return
}
resp := f(fc.Args)
- toolMsg := fmt.Sprintf("tool response: %+v", string(resp))
+ toolMsg := string(resp) // pass the raw tool output through, without the old "tool response: " prefix or %+v formatting
fmt.Fprintf(tv, "%s[-:-:b](%d) <%s>: [-:-:-]\n%s\n",
"\n", len(chatBody.Messages), cfg.ToolRole, toolMsg)
- chatRound(toolMsg, cfg.ToolRole, tv, false, false)
+
+ // Create tool response message with the proper tool_call_id
+ toolResponseMsg := models.RoleMsg{
+ Role: cfg.ToolRole,
+ Content: toolMsg,
+ ToolCallID: lastToolCallID, // Use the stored tool call ID
+ }
+ chatBody.Messages = append(chatBody.Messages, toolResponseMsg)
+
+ // Clear the stored tool call ID after using it
+ lastToolCallID = ""
+
+ // Trigger the assistant to continue processing with the new tool response
+ // by calling chatRound with empty content to continue the assistant's response
+ chatRound("", cfg.AssistantRole, tv, false, false)
}
func chatToTextSlice(showSys bool) []string {
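Note: the tool-response message built in findCall now carries the tool_call_id that OpenAI-compatible endpoints (including OpenRouter) match against the assistant's tool call. A minimal sketch of the resulting wire format, using a local stand-in for models.RoleMsg and made-up ID and tool output values (not part of the commit):

// Stand-in for models.RoleMsg with only the fields this commit touches.
package main

import (
	"encoding/json"
	"fmt"
)

type roleMsg struct {
	Role       string `json:"role"`
	Content    string `json:"content"`
	ToolCallID string `json:"tool_call_id,omitempty"`
}

func main() {
	msg := roleMsg{
		Role:       "tool",        // cfg.ToolRole in bot.go
		Content:    "42",          // raw tool output, no "tool response: " prefix
		ToolCallID: "call_abc123", // hypothetical ID captured from the stream
	}
	b, _ := json.Marshal(msg)
	fmt.Println(string(b))
	// Output: {"role":"tool","content":"42","tool_call_id":"call_abc123"}
}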
diff --git a/llm.go b/llm.go
index 47a1d46..e7245ce 100644
--- a/llm.go
+++ b/llm.go
@@ -160,11 +160,14 @@ func (op OpenAIer) ParseChunk(data []byte) (*models.TextChunk, error) {
Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content,
}
if len(llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls) > 0 {
- resp.ToolChunk = llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0].Function.Arguments
- fname := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0].Function.Name
+ toolCall := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0]
+ resp.ToolChunk = toolCall.Function.Arguments
+ fname := toolCall.Function.Name
if fname != "" {
resp.FuncName = fname
}
+ // Capture the tool call ID if available
+ resp.ToolID = toolCall.ID
}
if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" {
if resp.Chunk != "" {
@@ -471,11 +474,14 @@ func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) {
// Handle tool calls similar to OpenAIer
if len(llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls) > 0 {
- resp.ToolChunk = llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0].Function.Arguments
- fname := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0].Function.Name
+ toolCall := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.ToolCalls[0]
+ resp.ToolChunk = toolCall.Function.Arguments
+ fname := toolCall.Function.Name
if fname != "" {
resp.FuncName = fname
}
+ // Capture the tool call ID if available
+ resp.ToolID = toolCall.ID
}
if resp.ToolChunk != "" {
resp.ToolResp = true
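Note: both ParseChunk changes read the tool-call ID from the first streamed delta that announces the call; later deltas typically carry only argument fragments. A minimal decoding sketch with illustrative local structs and an invented chunk (the function name "recall" and the ID are made up, not from this repo):

package main

import (
	"encoding/json"
	"fmt"
)

type toolDeltaFunc struct {
	Name      string `json:"name"`
	Arguments string `json:"arguments"`
}

type toolDeltaResp struct {
	ID       string        `json:"id,omitempty"`
	Index    int           `json:"index"`
	Function toolDeltaFunc `json:"function"`
}

type delta struct {
	Content   string          `json:"content"`
	ToolCalls []toolDeltaResp `json:"tool_calls"`
}

func main() {
	// First tool-call chunk of a stream: carries the id and function name.
	raw := `{"content":"","tool_calls":[{"index":0,"id":"call_abc123","function":{"name":"recall","arguments":"{\"topic\""}}]}`
	var d delta
	if err := json.Unmarshal([]byte(raw), &d); err != nil {
		panic(err)
	}
	tc := d.ToolCalls[0]
	fmt.Println(tc.ID, tc.Function.Name, tc.Function.Arguments) // call_abc123 recall {"topic"
}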
diff --git a/models/models.go b/models/models.go
index 58f0291..798ea35 100644
--- a/models/models.go
+++ b/models/models.go
@@ -9,6 +9,7 @@ import (
)
type FuncCall struct {
+ ID string `json:"id,omitempty"`
Name string `json:"name"`
Args map[string]string `json:"args"`
}
@@ -39,6 +40,7 @@ type ToolDeltaFunc struct {
}
type ToolDeltaResp struct {
+ ID string `json:"id,omitempty"`
Index int `json:"index"`
Function ToolDeltaFunc `json:"function"`
}
@@ -70,6 +72,7 @@ type TextChunk struct {
Finished bool
ToolResp bool
FuncName string
+ ToolID string
}
type TextContentPart struct {
@@ -86,10 +89,11 @@ type ImageContentPart struct {
// RoleMsg represents a message with content that can be either a simple string or structured content parts
type RoleMsg struct {
- Role string `json:"role"`
- Content string `json:"-"`
- ContentParts []interface{} `json:"-"`
- hasContentParts bool // Flag to indicate which content type to marshal
+ Role string `json:"role"`
+ Content string `json:"-"`
+ ContentParts []interface{} `json:"-"`
+ ToolCallID string `json:"tool_call_id,omitempty"` // For tool response messages
+ hasContentParts bool // Flag to indicate which content type to marshal
}
// MarshalJSON implements custom JSON marshaling for RoleMsg
@@ -97,21 +101,25 @@ func (m RoleMsg) MarshalJSON() ([]byte, error) {
if m.hasContentParts {
// Use structured content format
aux := struct {
- Role string `json:"role"`
- Content []interface{} `json:"content"`
+ Role string `json:"role"`
+ Content []interface{} `json:"content"`
+ ToolCallID string `json:"tool_call_id,omitempty"`
}{
- Role: m.Role,
- Content: m.ContentParts,
+ Role: m.Role,
+ Content: m.ContentParts,
+ ToolCallID: m.ToolCallID,
}
return json.Marshal(aux)
} else {
// Use simple content format
aux := struct {
- Role string `json:"role"`
- Content string `json:"content"`
+ Role string `json:"role"`
+ Content string `json:"content"`
+ ToolCallID string `json:"tool_call_id,omitempty"`
}{
- Role: m.Role,
- Content: m.Content,
+ Role: m.Role,
+ Content: m.Content,
+ ToolCallID: m.ToolCallID,
}
return json.Marshal(aux)
}
@@ -121,26 +129,30 @@ func (m RoleMsg) MarshalJSON() ([]byte, error) {
func (m *RoleMsg) UnmarshalJSON(data []byte) error {
// First, try to unmarshal as structured content format
var structured struct {
- Role string `json:"role"`
- Content []interface{} `json:"content"`
+ Role string `json:"role"`
+ Content []interface{} `json:"content"`
+ ToolCallID string `json:"tool_call_id,omitempty"`
}
if err := json.Unmarshal(data, &structured); err == nil && len(structured.Content) > 0 {
m.Role = structured.Role
m.ContentParts = structured.Content
+ m.ToolCallID = structured.ToolCallID
m.hasContentParts = true
return nil
}
// Otherwise, unmarshal as simple content format
var simple struct {
- Role string `json:"role"`
- Content string `json:"content"`
+ Role string `json:"role"`
+ Content string `json:"content"`
+ ToolCallID string `json:"tool_call_id,omitempty"`
}
if err := json.Unmarshal(data, &simple); err != nil {
return err
}
m.Role = simple.Role
m.Content = simple.Content
+ m.ToolCallID = simple.ToolCallID
m.hasContentParts = false
return nil
}
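Note: a sketch of the message sequence this fix is aimed at. OpenAI-compatible endpoints expect a role:"tool" message whose tool_call_id matches the id in the assistant's earlier tool_calls entry; how the assistant turn itself is stored in chatBody is outside this diff. All IDs and values below are invented:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	messages := []map[string]any{
		{"role": "user", "content": "What is 6 times 7?"},
		// Assistant turn that requested the tool (the id comes from the stream).
		{"role": "assistant", "content": "", "tool_calls": []map[string]any{
			{"id": "call_abc123", "type": "function",
				"function": map[string]any{"name": "calc", "arguments": `{"expr":"6*7"}`}},
		}},
		// Tool result appended by findCall, now tagged with the matching id.
		{"role": "tool", "content": "42", "tool_call_id": "call_abc123"},
	}
	b, _ := json.MarshalIndent(messages, "", "  ")
	fmt.Println(string(b))
}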