diff options
author | Grail Finder <wohilas@gmail.com> | 2025-08-08 10:51:14 +0300 |
---|---|---|
committer | Grail Finder <wohilas@gmail.com> | 2025-08-08 10:51:14 +0300 |
commit | 589dfdda3fa89ecc984530ce3bfcc58ee2fd851d (patch) | |
tree | 1b14f5cede90ac7e6000f71f4d3c42f12985dee4 /llm.go | |
parent | 14558f98cd5621e9c5019ba1ba957f06e86d2cc6 (diff) |
Feat: tool chunk channel for openai tool calls
Diffstat (limited to 'llm.go')
-rw-r--r-- | llm.go | 47 |
1 file changed, 28 insertions, 19 deletions
@@ -246,22 +246,27 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader return bytes.NewReader(data), nil } -func (ds DeepSeekerChat) ParseChunk(data []byte) (string, bool, error) { +func (ds DeepSeekerChat) ParseChunk(data []byte) (*models.TextChunk, error) { llmchunk := models.DSChatStreamResp{} if err := json.Unmarshal(data, &llmchunk); err != nil { logger.Error("failed to decode", "error", err, "line", string(data)) - return "", false, err + return nil, err } + resp := &models.TextChunk{} if llmchunk.Choices[0].FinishReason != "" { if llmchunk.Choices[0].Delta.Content != "" { logger.Error("text inside of finish llmchunk", "chunk", llmchunk) } - return llmchunk.Choices[0].Delta.Content, true, nil - } - if llmchunk.Choices[0].Delta.ReasoningContent != "" { - return llmchunk.Choices[0].Delta.ReasoningContent, false, nil + resp.Chunk = llmchunk.Choices[0].Delta.Content + resp.Finished = true + } else { + if llmchunk.Choices[0].Delta.ReasoningContent != "" { + resp.Chunk = llmchunk.Choices[0].Delta.ReasoningContent + } else { + resp.Chunk = llmchunk.Choices[0].Delta.Content + } } - return llmchunk.Choices[0].Delta.Content, false, nil + return resp, nil } func (ds DeepSeekerChat) GetToken() string { @@ -316,20 +321,22 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro } // openrouter -func (or OpenRouterCompletion) ParseChunk(data []byte) (string, bool, error) { +func (or OpenRouterCompletion) ParseChunk(data []byte) (*models.TextChunk, error) { llmchunk := models.OpenRouterCompletionResp{} if err := json.Unmarshal(data, &llmchunk); err != nil { logger.Error("failed to decode", "error", err, "line", string(data)) - return "", false, err + return nil, err + } + resp := &models.TextChunk{ + Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Text, } - content := llmchunk.Choices[len(llmchunk.Choices)-1].Text if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" { - if content != "" { + if resp.Chunk != "" { logger.Error("text inside of finish llmchunk", "chunk", llmchunk) } - return content, true, nil + resp.Finished = true } - return content, false, nil + return resp, nil } func (or OpenRouterCompletion) GetToken() string { @@ -381,20 +388,22 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader } // chat -func (or OpenRouterChat) ParseChunk(data []byte) (string, bool, error) { +func (or OpenRouterChat) ParseChunk(data []byte) (*models.TextChunk, error) { llmchunk := models.OpenRouterChatResp{} if err := json.Unmarshal(data, &llmchunk); err != nil { logger.Error("failed to decode", "error", err, "line", string(data)) - return "", false, err + return nil, err + } + resp := &models.TextChunk{ + Chunk: llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content, } - content := llmchunk.Choices[len(llmchunk.Choices)-1].Delta.Content if llmchunk.Choices[len(llmchunk.Choices)-1].FinishReason == "stop" { - if resp.Chunk != "" { logger.Error("text inside of finish llmchunk", "chunk", llmchunk) } - return content, true, nil + resp.Finished = true } - return content, false, nil + return resp, nil } func (or OpenRouterChat) GetToken() string { |