diff options
| author | Grail Finder <wohilas@gmail.com> | 2026-02-10 11:05:09 +0300 |
|---|---|---|
| committer | Grail Finder <wohilas@gmail.com> | 2026-02-10 11:05:09 +0300 |
| commit | 875de679cf4732bbd70361d826d65d9d8b190add (patch) | |
| tree | e4ad90c2bdde747ad057e4e7d6e850c3173018fd /agent | |
| parent | 3b542421e35180db9b9f972ee973befc42f1a46f (diff) | |
| parent | 37b98ad36cd7e63d96a190017b78fe35143a2e6a (diff) | |
Merge branch 'feat/char-secrets'
Diffstat (limited to 'agent')
| -rw-r--r-- | agent/request.go | 14 |
1 file changed, 8 insertions, 6 deletions
diff --git a/agent/request.go b/agent/request.go index bb4a80d..14009dd 100644 --- a/agent/request.go +++ b/agent/request.go @@ -77,17 +77,18 @@ func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) { } prompt := strings.TrimSpace(sb.String()) - if isDeepSeek { + switch { + case isDeepSeek: // DeepSeek completion req := models.NewDSCompletionReq(prompt, model, defaultProps["temperature"], []string{}) req.Stream = false // Agents don't need streaming return json.Marshal(req) - } else if isOpenRouter { + case isOpenRouter: // OpenRouter completion req := models.NewOpenRouterCompletionReq(model, prompt, defaultProps, []string{}) req.Stream = false // Agents don't need streaming return json.Marshal(req) - } else { + default: // Assume llama.cpp completion req := models.NewLCPReq(prompt, model, nil, defaultProps, []string{}) req.Stream = false // Agents don't need streaming @@ -103,15 +104,16 @@ func (ag *AgentClient) buildRequest(sysprompt, msg string) ([]byte, error) { Messages: messages, } - if isDeepSeek { + switch { + case isDeepSeek: // DeepSeek chat req := models.NewDSChatReq(*chatBody) return json.Marshal(req) - } else if isOpenRouter { + case isOpenRouter: // OpenRouter chat req := models.NewOpenRouterChatReq(*chatBody, defaultProps) return json.Marshal(req) - } else { + default: // Assume llama.cpp chat (OpenAI format) req := models.OpenAIReq{ ChatBody: chatBody, |
