| author | Grail Finder <wohilas@gmail.com> | 2026-02-06 12:42:06 +0300 |
|---|---|---|
| committer | Grail Finder <wohilas@gmail.com> | 2026-02-06 12:42:06 +0300 |
| commit | 4af866079c3f21eab12b02c3158567539ca40c50 | |
| tree | d428557aae121ba89e66c728240723a9c4e718ec | |
| parent | 478a505869bf26b15dcbc77feb2c09c1f2ff4aac | |
Chore: linter complaints
Diffstat (limited to 'agent/request.go')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | agent/request.go | 14 |

1 file changed, 8 insertions(+), 6 deletions(-)
```diff
diff --git a/agent/request.go b/agent/request.go
index bb4a80d..14009dd 100644
--- a/agent/request.go
+++ b/agent/request.go
@@ -77,17 +77,18 @@ func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) {
 	}
 	prompt := strings.TrimSpace(sb.String())
-	if isDeepSeek {
+	switch {
+	case isDeepSeek:
 		// DeepSeek completion
 		req := models.NewDSCompletionReq(prompt, model,
 			defaultProps["temperature"], []string{})
 		req.Stream = false // Agents don't need streaming
 		return json.Marshal(req)
-	} else if isOpenRouter {
+	case isOpenRouter:
 		// OpenRouter completion
 		req := models.NewOpenRouterCompletionReq(model, prompt, defaultProps, []string{})
 		req.Stream = false // Agents don't need streaming
 		return json.Marshal(req)
-	} else {
+	default:
 		// Assume llama.cpp completion
 		req := models.NewLCPReq(prompt, model, nil, defaultProps, []string{})
 		req.Stream = false // Agents don't need streaming
@@ -103,15 +104,16 @@ func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) {
 		Messages: messages,
 	}
 
-	if isDeepSeek {
+	switch {
+	case isDeepSeek:
 		// DeepSeek chat
 		req := models.NewDSChatReq(*chatBody)
 		return json.Marshal(req)
-	} else if isOpenRouter {
+	case isOpenRouter:
 		// OpenRouter chat
 		req := models.NewOpenRouterChatReq(*chatBody, defaultProps)
 		return json.Marshal(req)
-	} else {
+	default:
 		// Assume llama.cpp chat (OpenAI format)
 		req := models.OpenAIReq{
 			ChatBody: chatBody,
```
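Both hunks apply the same mechanical rewrite: an `if` / `else if` / `else` ladder becomes a tagless `switch`. This is the usual fix for a check like gocritic's `ifElseChain`; the commit message doesn't name the linter, so that attribution is a guess. Below is a minimal runnable sketch of the pattern, with hypothetical provider flags and endpoint strings standing in for the repository's request constructors:

```go
package main

import "fmt"

// buildEndpoint picks a base URL per provider flag, mirroring the
// dispatch shape in buildRequest above. The flags and URLs here are
// illustrative only, not taken from the repository.
func buildEndpoint(isDeepSeek, isOpenRouter bool) string {
	// A tagless switch replaces the if / else if / else ladder
	// that the linter flagged; the first true case wins.
	switch {
	case isDeepSeek:
		return "https://api.deepseek.com"
	case isOpenRouter:
		return "https://openrouter.ai/api/v1"
	default:
		// Assume a local llama.cpp server, as the diff does.
		return "http://localhost:8080"
	}
}

func main() {
	fmt.Println(buildEndpoint(false, true)) // https://openrouter.ai/api/v1
}
```

A tagless `switch` evaluates its `case` expressions top to bottom and runs the first that is true, so the rewrite is behavior-preserving; it only drops the repeated `} else if` brackets that trip the linter.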
