summaryrefslogtreecommitdiff
path: root/llm.go
diff options
context:
space:
mode:
authorGrail Finder <wohilas@gmail.com>2025-03-10 22:05:40 +0300
committerGrail Finder (aider) <wohilas@gmail.com>2025-03-10 22:05:40 +0300
commit686bb1feddc85de054524005dbfc41dcef50b0f8 (patch)
tree9ac8dca2b189a42248895dad60804e7d7a123101 /llm.go
parentd963304f61b0014e92573185cfdcf0f55ce382ad (diff)
feat: add DeepSeekerChat chunk parser and refactor DeepSeeker to DeepSeekerCompletion
Diffstat (limited to 'llm.go')
-rw-r--r--llm.go57
1 file changed, 53 insertions, 4 deletions
diff --git a/llm.go b/llm.go
index cb4d537..2af1d4b 100644
--- a/llm.go
+++ b/llm.go
@@ -21,7 +21,9 @@ func choseChunkParser() {
case "http://localhost:8080/v1/chat/completions":
chunkParser = OpenAIer{}
case "https://api.deepseek.com/beta/completions":
- chunkParser = DeepSeeker{}
+ chunkParser = DeepSeekerCompletion{}
+ case "https://api.deepseek.com/chat/completions":
+ chunkParser = DeepSeekerChat{}
default:
chunkParser = LlamaCPPeer{}
}
@@ -37,7 +39,9 @@ type LlamaCPPeer struct {
}
type OpenAIer struct {
}
-type DeepSeeker struct {
+type DeepSeekerCompletion struct {
+}
+type DeepSeekerChat struct {
}
func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
@@ -148,7 +152,7 @@ func (op OpenAIer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
}
// deepseek
-func (ds DeepSeeker) ParseChunk(data []byte) (string, bool, error) {
+func (ds DeepSeekerCompletion) ParseChunk(data []byte) (string, bool, error) {
llmchunk := models.DSCompletionResp{}
if err := json.Unmarshal(data, &llmchunk); err != nil {
logger.Error("failed to decode", "error", err, "line", string(data))
@@ -163,7 +167,7 @@ func (ds DeepSeeker) ParseChunk(data []byte) (string, bool, error) {
return llmchunk.Choices[0].Text, false, nil
}
-func (ds DeepSeeker) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader, error) {
if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg)
@@ -206,3 +210,48 @@ func (ds DeepSeeker) FormMsg(msg, role string, resume bool) (io.Reader, error) {
}
return bytes.NewReader(data), nil
}
+
+func (ds DeepSeekerChat) ParseChunk(data []byte) (string, bool, error) {
+ llmchunk := models.DSCompletionResp{}
+ if err := json.Unmarshal(data, &llmchunk); err != nil {
+ logger.Error("failed to decode", "error", err, "line", string(data))
+ return "", false, err
+ }
+ if llmchunk.Choices[0].FinishReason != "" {
+ if llmchunk.Choices[0].Text != "" {
+ logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
+ }
+ return llmchunk.Choices[0].Text, true, nil
+ }
+ return llmchunk.Choices[0].Text, false, nil
+}
+
+func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+ if cfg.ToolUse && !resume {
+ // prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
+ // add to chat body
+ chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
+ }
+ if msg != "" { // otherwise let the bot continue
+ newMsg := models.RoleMsg{Role: role, Content: msg}
+ chatBody.Messages = append(chatBody.Messages, newMsg)
+ // if rag
+ if cfg.RAGEnabled {
+ ragResp, err := chatRagUse(newMsg.Content)
+ if err != nil {
+ logger.Error("failed to form a rag msg", "error", err)
+ return nil, err
+ }
+ ragMsg := models.RoleMsg{Role: cfg.ToolRole, Content: ragResp}
+ chatBody.Messages = append(chatBody.Messages, ragMsg)
+ }
+ }
+ // copy chat body and replace config.UserRole with "user"; ai!
+ models.NewDSCharReq(chatBody)
+ data, err := json.Marshal(chatBody)
+ if err != nil {
+ logger.Error("failed to form a msg", "error", err)
+ return nil, err
+ }
+ return bytes.NewReader(data), nil
+}