summary refs log tree commit diff
path: root/llm.go
diff options
context:
space:
mode:
Diffstat (limited to 'llm.go')
-rw-r--r-- | llm.go | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/llm.go b/llm.go
index 3307467..046d28d 100644
--- a/llm.go
+++ b/llm.go
@@ -2,8 +2,8 @@ package main
import (
"bytes"
- "gf-lt/models"
"encoding/json"
+ "gf-lt/models"
"io"
"strings"
)
@@ -88,10 +88,10 @@ func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error)
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt)
var payload any
- payload = models.NewLCPReq(prompt, cfg, defaultLCPProps)
+ payload = models.NewLCPReq(prompt, cfg, defaultLCPProps, chatBody.MakeStopSlice())
if strings.Contains(chatBody.Model, "deepseek") {
payload = models.NewDSCompletionReq(prompt, chatBody.Model,
- defaultLCPProps["temp"], cfg)
+ defaultLCPProps["temp"], cfg, chatBody.MakeStopSlice())
}
data, err := json.Marshal(payload)
if err != nil {
@@ -213,7 +213,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt)
payload := models.NewDSCompletionReq(prompt, chatBody.Model,
- defaultLCPProps["temp"], cfg)
+ defaultLCPProps["temp"], cfg, chatBody.MakeStopSlice())
data, err := json.Marshal(payload)
if err != nil {
logger.Error("failed to form a msg", "error", err)