diff options
| author | Grail Finder <wohilas@gmail.com> | 2026-02-04 12:47:54 +0300 |
|---|---|---|
| committer | Grail Finder <wohilas@gmail.com> | 2026-02-04 12:47:54 +0300 |
| commit | 7187df509fe9cc506695a1036b840e03eeb25cff (patch) | |
| tree | 394b40b5375909293ead99e12c733fc1f91370f5 /llm.go | |
| parent | 79861e7c2bc6f2ed95309ca6e83577ddc4e2c63a (diff) | |
Enha: stricter stop string
Diffstat (limited to 'llm.go')
| -rw-r--r-- | llm.go | 6 |
1 file changed, 3 insertions(+), 3 deletions(-)
@@ -207,7 +207,7 @@ func (lcp LCPCompletion) FormMsg(msg, role string, resume bool) (io.Reader, erro
 	logger.Debug("checking prompt for /completion", "tool_use",
 		cfg.ToolUse, "msg", msg, "resume", resume, "prompt", prompt, "multimodal_data_count", len(multimodalData))
 	payload := models.NewLCPReq(prompt, chatBody.Model, multimodalData,
-		defaultLCPProps, chatBody.MakeStopSliceExcluding(botPersona, listChatRoles()))
+		defaultLCPProps, chatBody.MakeStopSliceExcluding("", listChatRoles()))
 	data, err := json.Marshal(payload)
 	if err != nil {
 		logger.Error("failed to form a msg", "error", err)
@@ -444,7 +444,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
 		"msg", msg, "resume", resume, "prompt", prompt)
 	payload := models.NewDSCompletionReq(prompt,
 		chatBody.Model, defaultLCPProps["temp"],
-		chatBody.MakeStopSliceExcluding(botPersona, listChatRoles()))
+		chatBody.MakeStopSliceExcluding("", listChatRoles()))
 	data, err := json.Marshal(payload)
 	if err != nil {
 		logger.Error("failed to form a msg", "error", err)
@@ -604,7 +604,7 @@ func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader
 	if cfg.ThinkUse && !cfg.ToolUse {
 		prompt += "<think>"
 	}
-	stopSlice := chatBody.MakeStopSliceExcluding(botPersona, listChatRoles())
+	stopSlice := chatBody.MakeStopSliceExcluding("", listChatRoles())
 	logger.Debug("checking prompt for /completion", "tool_use",
 		cfg.ToolUse, "msg", msg, "resume", resume, "prompt", prompt, "stop_strings", stopSlice)
 	payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt,
