author    Grail Finder <wohilas@gmail.com>  2025-08-07 12:18:01 +0300
committer Grail Finder <wohilas@gmail.com>  2025-08-07 12:18:01 +0300
commit    813cb49d36edc987ecfad13291f58b6b044df3ba (patch)
tree      16135431c421c5f1a06a10972805b20beb3dc2b8 /llm.go
parent    9b2558ffe88993e36d19cfb951ef8eb6016cec36 (diff)
Feat: open router impl
Diffstat (limited to 'llm.go')
-rw-r--r--  llm.go  178
1 file changed, 168 insertions(+), 10 deletions(-)
diff --git a/llm.go b/llm.go
index 046d28d..6d1cdbf 100644
--- a/llm.go
+++ b/llm.go
@@ -11,6 +11,7 @@ import (
type ChunkParser interface {
ParseChunk([]byte) (string, bool, error)
FormMsg(msg, role string, cont bool) (io.Reader, error)
+ GetToken() string
}
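
The new GetToken method lets the request sender pull per-API credentials without caring which backend is active. A minimal sketch of how a caller might use it; sendStream, its signature, and the header wiring are assumptions for illustration, not code from this commit (assumes net/http and io are imported):

// Hypothetical caller: attach the bearer header only when the active
// parser reports a token; LlamaCPPeer returns "", so no header is sent.
func sendStream(api string, body io.Reader) (*http.Response, error) {
	req, err := http.NewRequest("POST", api, body)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	if token := chunkParser.GetToken(); token != "" {
		req.Header.Set("Authorization", "Bearer "+token)
	}
	return http.DefaultClient.Do(req)
}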
func choseChunkParser() {
@@ -32,15 +33,17 @@ func choseChunkParser() {
chunkParser = DeepSeekerChat{}
logger.Debug("chosen deepseekerchat", "link", cfg.CurrentAPI)
return
+ case "https://openrouter.ai/api/v1/completions":
+ chunkParser = OpenRouterCompletion{}
+ logger.Debug("chosen openroutercompletion", "link", cfg.CurrentAPI)
+ return
+ case "https://openrouter.ai/api/v1/chat/completions":
+ chunkParser = OpenRouterChat{}
+ logger.Debug("chosen openrouterchat", "link", cfg.CurrentAPI)
+ return
default:
chunkParser = LlamaCPPeer{}
}
- // if strings.Contains(cfg.CurrentAPI, "chat") {
- // logger.Debug("chosen chat parser")
- // chunkParser = OpenAIer{}
- // return
- // }
- // logger.Debug("chosen llamacpp /completion parser")
}
type LlamaCPPeer struct {
@@ -51,6 +54,16 @@ type DeepSeekerCompletion struct {
}
type DeepSeekerChat struct {
}
+type OpenRouterCompletion struct {
+ Model string
+}
+type OpenRouterChat struct {
+ Model string
+}
+
+func (lcp LlamaCPPeer) GetToken() string {
+ return "" // local llama.cpp endpoint: no token is sent
+}
func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
logger.Debug("formmsg llamacppeer", "link", cfg.CurrentAPI)
@@ -88,10 +101,10 @@ func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error)
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt)
var payload any
- payload = models.NewLCPReq(prompt, cfg, defaultLCPProps, chatBody.MakeStopSlice())
- if strings.Contains(chatBody.Model, "deepseek") {
+ payload = models.NewLCPReq(prompt, defaultLCPProps, chatBody.MakeStopSlice())
+ if strings.Contains(chatBody.Model, "deepseek") { // TODO: why?
payload = models.NewDSCompletionReq(prompt, chatBody.Model,
- defaultLCPProps["temp"], cfg, chatBody.MakeStopSlice())
+ defaultLCPProps["temp"], chatBody.MakeStopSlice())
}
data, err := json.Marshal(payload)
if err != nil {
@@ -116,6 +129,10 @@ func (lcp LlamaCPPeer) ParseChunk(data []byte) (string, bool, error) {
return llmchunk.Content, false, nil
}
+func (op OpenAIer) GetToken() string {
+ return ""
+}
+
func (op OpenAIer) ParseChunk(data []byte) (string, bool, error) {
llmchunk := models.LLMRespChunk{}
if err := json.Unmarshal(data, &llmchunk); err != nil {
@@ -177,6 +194,10 @@ func (ds DeepSeekerCompletion) ParseChunk(data []byte) (string, bool, error) {
return llmchunk.Choices[0].Text, false, nil
}
+func (ds DeepSeekerCompletion) GetToken() string {
+ return cfg.DeepSeekToken
+}
+
func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader, error) {
logger.Debug("formmsg deepseekercompletion", "link", cfg.CurrentAPI)
if msg != "" { // otherwise let the bot continue
@@ -213,7 +234,7 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
"msg", msg, "resume", resume, "prompt", prompt)
payload := models.NewDSCompletionReq(prompt, chatBody.Model,
- defaultLCPProps["temp"], cfg, chatBody.MakeStopSlice())
+ defaultLCPProps["temp"], chatBody.MakeStopSlice())
data, err := json.Marshal(payload)
if err != nil {
logger.Error("failed to form a msg", "error", err)
@@ -240,6 +261,10 @@ func (ds DeepSeekerChat) ParseChunk(data []byte) (string, bool, error) {
return llmchunk.Choices[0].Delta.Content, false, nil
}
+func (ds DeepSeekerChat) GetToken() string {
+ return cfg.DeepSeekToken
+}
+
func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
logger.Debug("formmsg deepseekerchat", "link", cfg.CurrentAPI)
if cfg.ToolUse && !resume {
@@ -286,3 +311,136 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
}
return bytes.NewReader(data), nil
}
+
+// openrouter
+func (or OpenRouterCompletion) ParseChunk(data []byte) (string, bool, error) {
+ llmchunk := models.OpenRouterCompletionResp{}
+ if err := json.Unmarshal(data, &llmchunk); err != nil {
+ logger.Error("failed to decode", "error", err, "line", string(data))
+ return "", false, err
+ }
+ if len(llmchunk.Choices) == 0 { // guard against empty keep-alive chunks
+ return "", false, nil
+ }
+ last := llmchunk.Choices[len(llmchunk.Choices)-1]
+ if last.FinishReason == "stop" {
+ if last.Text != "" {
+ logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
+ }
+ return last.Text, true, nil
+ }
+ return last.Text, false, nil
+}
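
For reference, a streaming chunk from OpenRouter's /completions endpoint follows the OpenAI-style shape that models.OpenRouterCompletionResp mirrors. A usage sketch with a made-up chunk; the JSON is an assumed example of the stream format, not a captured response:

chunk := []byte(`{"choices":[{"text":"Hello","finish_reason":null}]}`)
text, done, err := OpenRouterCompletion{}.ParseChunk(chunk)
// text == "Hello", done == false, err == nil; done flips to true
// once finish_reason arrives as "stop".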
+
+func (or OpenRouterCompletion) GetToken() string {
+ return cfg.OpenRouterToken
+}
+
+func (or OpenRouterCompletion) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+ logger.Debug("formmsg openroutercompletion", "link", cfg.CurrentAPI)
+ if msg != "" { // otherwise let the bot continue
+ newMsg := models.RoleMsg{Role: role, Content: msg}
+ chatBody.Messages = append(chatBody.Messages, newMsg)
+ // if rag
+ if cfg.RAGEnabled {
+ ragResp, err := chatRagUse(newMsg.Content)
+ if err != nil {
+ logger.Error("failed to form a rag msg", "error", err)
+ return nil, err
+ }
+ ragMsg := models.RoleMsg{Role: cfg.ToolRole, Content: ragResp}
+ chatBody.Messages = append(chatBody.Messages, ragMsg)
+ }
+ }
+ if cfg.ToolUse && !resume {
+ // add to chat body
+ chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
+ }
+ messages := make([]string, len(chatBody.Messages))
+ for i, m := range chatBody.Messages {
+ messages[i] = m.ToPrompt()
+ }
+ prompt := strings.Join(messages, "\n")
+ // TODO: use a strings.Builder here?
+ if !resume {
+ botMsgStart := "\n" + cfg.AssistantRole + ":\n"
+ prompt += botMsgStart
+ }
+ if cfg.ThinkUse && !cfg.ToolUse {
+ prompt += "<think>"
+ }
+ logger.Debug("checking prompt for /completion", "tool_use", cfg.ToolUse,
+ "msg", msg, "resume", resume, "prompt", prompt)
+ payload := models.NewOpenRouterCompletionReq(chatBody.Model, prompt, defaultLCPProps, chatBody.MakeStopSlice())
+ data, err := json.Marshal(payload)
+ if err != nil {
+ logger.Error("failed to form a msg", "error", err)
+ return nil, err
+ }
+ return bytes.NewReader(data), nil
+}
+
+// chat
+func (or OpenRouterChat) ParseChunk(data []byte) (string, bool, error) {
+ llmchunk := models.OpenRouterChatResp{}
+ if err := json.Unmarshal(data, &llmchunk); err != nil {
+ logger.Error("failed to decode", "error", err, "line", string(data))
+ return "", false, err
+ }
+ if len(llmchunk.Choices) == 0 { // guard against empty keep-alive chunks
+ return "", false, nil
+ }
+ last := llmchunk.Choices[len(llmchunk.Choices)-1]
+ if last.FinishReason == "stop" {
+ if last.Delta.Content != "" {
+ logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
+ }
+ return last.Delta.Content, true, nil
+ }
+ return last.Delta.Content, false, nil
+}
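
The chat variant reads the incremental delta instead of the text field; again an assumed OpenAI-style example chunk, not a captured response:

chunk := []byte(`{"choices":[{"delta":{"content":"Hi"},"finish_reason":null}]}`)
text, done, err := OpenRouterChat{}.ParseChunk(chunk)
// text == "Hi", done == false, err == nil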
+
+func (or OpenRouterChat) GetToken() string {
+ return cfg.OpenRouterToken
+}
+
+func (or OpenRouterChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+ logger.Debug("formmsg openrouterchat", "link", cfg.CurrentAPI)
+ if cfg.ToolUse && !resume {
+ // add to chat body
+ chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
+ }
+ if msg != "" { // otherwise let the bot continue
+ newMsg := models.RoleMsg{Role: role, Content: msg}
+ chatBody.Messages = append(chatBody.Messages, newMsg)
+ // if rag
+ if cfg.RAGEnabled {
+ ragResp, err := chatRagUse(newMsg.Content)
+ if err != nil {
+ logger.Error("failed to form a rag msg", "error", err)
+ return nil, err
+ }
+ ragMsg := models.RoleMsg{Role: cfg.ToolRole, Content: ragResp}
+ chatBody.Messages = append(chatBody.Messages, ragMsg)
+ }
+ }
+ // Create a copy of the chat body with the user role normalized to
+ // the standard "user" name the API expects
+ bodyCopy := &models.ChatBody{
+ Messages: make([]models.RoleMsg, len(chatBody.Messages)),
+ Model: chatBody.Model,
+ Stream: chatBody.Stream,
+ }
+ for i, msg := range chatBody.Messages {
+ logger.Debug("checking roles", "#", i, "role", msg.Role)
+ bodyCopy.Messages[i] = msg // copy first so content is never lost
+ // i == 1: the first message after the system prompt is assumed to
+ // be the user's turn, whatever custom role it carries
+ if msg.Role == cfg.UserRole || i == 1 {
+ bodyCopy.Messages[i].Role = "user"
+ logger.Debug("replaced role in body", "#", i)
+ }
+ }
+ // NewDSCharReq is reused here; OpenRouter's chat endpoint accepts the
+ // same OpenAI-style body the DeepSeek constructor builds
+ dsBody := models.NewDSCharReq(*bodyCopy)
+ data, err := json.Marshal(dsBody)
+ if err != nil {
+ logger.Error("failed to form a msg", "error", err)
+ return nil, err
+ }
+ return bytes.NewReader(data), nil
+}
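
To illustrate the role normalization above, assume cfg.UserRole is a custom name such as "adventurer"; the messages here are hypothetical, not from the repo:

msgs := []models.RoleMsg{
	{Role: "system", Content: "you are a scribe"},
	{Role: "adventurer", Content: "hi"}, // custom user role
	{Role: "assistant", Content: "hello"},
}
// After the loop: msgs[1] becomes {Role: "user", Content: "hi"}, so
// OpenRouter sees a standard system/user/assistant sequence; index 1
// is forced to "user" even when its role name does not match.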