-rw-r--r--   .gitignore              1
-rw-r--r--   bot.go                 41
-rw-r--r--   llm.go                 44
-rw-r--r--   models/models.go       19
-rw-r--r--   pngmeta/altwriter.go   17
5 files changed, 77 insertions, 45 deletions
diff --git a/.gitignore b/.gitignore
index d99fe9d..d594085 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,4 @@ history/
config.toml
sysprompts/*
history_bak/
+.aider*
diff --git a/bot.go b/bot.go
index 409a97b..e7cedc5 100644
--- a/bot.go
+++ b/bot.go
@@ -16,6 +16,7 @@ import (
"net/http"
"os"
"path"
+ "strconv"
"strings"
"time"
@@ -121,24 +122,30 @@ func fetchDSBalance() *models.DSBalance {
func sendMsgToLLM(body io.Reader) {
choseChunkParser()
+ bodyBytes, _ := io.ReadAll(body)
+ ok := json.Valid(bodyBytes)
+ if !ok {
+ panic("invalid json")
+ }
// nolint
- req, err := http.NewRequest("POST", cfg.CurrentAPI, body)
- req.Header.Add("Accept", "application/json")
- req.Header.Add("Content-Type", "application/json")
- req.Header.Add("Authorization", "Bearer "+cfg.DeepSeekToken)
- // nolint
- // resp, err := httpClient.Post(cfg.CurrentAPI, "application/json", body)
+ req, err := http.NewRequest("POST", cfg.CurrentAPI, bytes.NewReader(bodyBytes))
if err != nil {
- logger.Error("llamacpp api", "error", err)
+ logger.Error("newreq error", "error", err)
if err := notifyUser("error", "apicall failed:"+err.Error()); err != nil {
logger.Error("failed to notify", "error", err)
}
streamDone <- true
return
}
+ req.Header.Add("Accept", "application/json")
+ req.Header.Add("Content-Type", "application/json")
+ req.Header.Add("Authorization", "Bearer "+cfg.DeepSeekToken)
+ req.Header.Set("Content-Length", strconv.Itoa(len(bodyBytes)))
+ req.Header.Set("Accept-Encoding", "gzip")
+ // nolint
+ // resp, err := httpClient.Post(cfg.CurrentAPI, "application/json", body)
resp, err := httpClient.Do(req)
if err != nil {
- bodyBytes, _ := io.ReadAll(body)
logger.Error("llamacpp api", "error", err, "body", string(bodyBytes))
if err := notifyUser("error", "apicall failed:"+err.Error()); err != nil {
logger.Error("failed to notify", "error", err)
@@ -164,12 +171,13 @@ func sendMsgToLLM(body io.Reader) {
}
line, err := reader.ReadBytes('\n')
if err != nil {
- logger.Error("error reading response body", "error", err, "line", string(line))
- if err.Error() != "EOF" {
- streamDone <- true
- break
- }
- continue
+ logger.Error("error reading response body", "error", err, "line", string(line),
+ "reqbody", string(bodyBytes), "user_role", cfg.UserRole, "parser", chunkParser, "link", cfg.CurrentAPI)
+ // if err.Error() != "EOF" {
+ streamDone <- true
+ break
+ // }
+ // continue
}
if len(line) <= 1 {
if interruptResp {
@@ -192,8 +200,8 @@ func sendMsgToLLM(body io.Reader) {
break
}
// Handle error messages in response content
- if content != "" && strings.Contains(strings.ToLower(content), "error") {
- logger.Error("API error response detected", "content", content, "url", cfg.CurrentAPI)
+ if string(line) != "" && strings.Contains(strings.ToLower(string(line)), "error") {
+ logger.Error("API error response detected", "line", line, "url", cfg.CurrentAPI)
streamDone <- true
break
}
@@ -274,6 +282,7 @@ func chatRound(userMsg, role string, tv *tview.TextView, regen, resume bool) {
return
}
}
+ choseChunkParser()
reader, err := chunkParser.FormMsg(userMsg, role, resume)
if reader == nil || err != nil {
logger.Error("empty reader from msgs", "role", role, "error", err)
diff --git a/llm.go b/llm.go
index af8412d..40ddd87 100644
--- a/llm.go
+++ b/llm.go
@@ -18,12 +18,20 @@ func choseChunkParser() {
switch cfg.CurrentAPI {
case "http://localhost:8080/completion":
chunkParser = LlamaCPPeer{}
+ logger.Debug("chosen llamacppeer", "link", cfg.CurrentAPI)
+ return
case "http://localhost:8080/v1/chat/completions":
chunkParser = OpenAIer{}
+ logger.Debug("chosen openair", "link", cfg.CurrentAPI)
+ return
case "https://api.deepseek.com/beta/completions":
chunkParser = DeepSeekerCompletion{}
+ logger.Debug("chosen deepseekercompletio", "link", cfg.CurrentAPI)
+ return
case "https://api.deepseek.com/chat/completions":
chunkParser = DeepSeekerChat{}
+ logger.Debug("chosen deepseekerchat", "link", cfg.CurrentAPI)
+ return
default:
chunkParser = LlamaCPPeer{}
}
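choseChunkParser now logs the chosen parser and returns from each case instead of falling through. The same endpoint-to-parser dispatch could also be written as a lookup table; the sketch below uses stand-in parser types rather than the ones defined later in llm.go, with a fallback that matches the default branch above.

    package main

    import "fmt"

    // ChunkParser stands in for the interface used in llm.go.
    type ChunkParser interface{ Name() string }

    type LlamaCPPeer struct{}
    type OpenAIer struct{}

    func (LlamaCPPeer) Name() string { return "LlamaCPPeer" }
    func (OpenAIer) Name() string    { return "OpenAIer" }

    // parserByAPI maps an endpoint to its parser; unknown endpoints fall back
    // to LlamaCPPeer, matching the default branch of the switch above.
    var parserByAPI = map[string]ChunkParser{
        "http://localhost:8080/completion":          LlamaCPPeer{},
        "http://localhost:8080/v1/chat/completions": OpenAIer{},
    }

    func choseChunkParser(api string) ChunkParser {
        if p, ok := parserByAPI[api]; ok {
            return p
        }
        return LlamaCPPeer{}
    }

    func main() {
        fmt.Println(choseChunkParser("http://localhost:8080/completion").Name())
        fmt.Println(choseChunkParser("https://example.invalid/api").Name())
    }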
@@ -45,6 +53,7 @@ type DeepSeekerChat struct {
}
func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+ logger.Debug("formmsg llamacppeer", "link", cfg.CurrentAPI)
if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg)
@@ -124,6 +133,7 @@ func (op OpenAIer) ParseChunk(data []byte) (string, bool, error) {
}
func (op OpenAIer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+ logger.Debug("formmsg openaier", "link", cfg.CurrentAPI)
if cfg.ToolUse && !resume {
// prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
// add to chat body
@@ -168,6 +178,7 @@ func (ds DeepSeekerCompletion) ParseChunk(data []byte) (string, bool, error) {
}
func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+ logger.Debug("formmsg deepseekercompletion", "link", cfg.CurrentAPI)
if msg != "" { // otherwise let the bot to continue
newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg)
@@ -212,21 +223,25 @@ func (ds DeepSeekerCompletion) FormMsg(msg, role string, resume bool) (io.Reader
}
func (ds DeepSeekerChat) ParseChunk(data []byte) (string, bool, error) {
- llmchunk := models.DSCompletionResp{}
+ llmchunk := models.DSChatStreamResp{}
if err := json.Unmarshal(data, &llmchunk); err != nil {
logger.Error("failed to decode", "error", err, "line", string(data))
return "", false, err
}
if llmchunk.Choices[0].FinishReason != "" {
- if llmchunk.Choices[0].Text != "" {
+ if llmchunk.Choices[0].Delta.Content != "" {
logger.Error("text inside of finish llmchunk", "chunk", llmchunk)
}
- return llmchunk.Choices[0].Text, true, nil
+ return llmchunk.Choices[0].Delta.Content, true, nil
}
- return llmchunk.Choices[0].Text, false, nil
+ if llmchunk.Choices[0].Delta.ReasoningContent != "" {
+ return llmchunk.Choices[0].Delta.ReasoningContent, false, nil
+ }
+ return llmchunk.Choices[0].Delta.Content, false, nil
}
func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+ logger.Debug("formmsg deepseekerchat", "link", cfg.CurrentAPI)
if cfg.ToolUse && !resume {
// prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
// add to chat body
@@ -247,17 +262,24 @@ func (ds DeepSeekerChat) FormMsg(msg, role string, resume bool) (io.Reader, erro
}
}
// Create copy of chat body with standardized user role
- modifiedBody := *chatBody
- modifiedBody.Messages = make([]models.RoleMsg, len(chatBody.Messages))
+ // modifiedBody := *chatBody
+ bodyCopy := &models.ChatBody{
+ Messages: make([]models.RoleMsg, len(chatBody.Messages)),
+ Model: chatBody.Model,
+ Stream: chatBody.Stream,
+ }
+ // modifiedBody.Messages = make([]models.RoleMsg, len(chatBody.Messages))
for i, msg := range chatBody.Messages {
- if msg.Role == cfg.UserRole {
- modifiedBody.Messages[i].Role = "user"
+ logger.Debug("checking roles", "#", i, "role", msg.Role)
+ if msg.Role == cfg.UserRole || i == 1 {
+ bodyCopy.Messages[i].Role = "user"
+ logger.Debug("replaced role in body", "#", i)
} else {
- modifiedBody.Messages[i] = msg
+ bodyCopy.Messages[i] = msg
}
}
- models.NewDSCharReq(&modifiedBody)
- data, err := json.Marshal(chatBody)
+ dsBody := models.NewDSCharReq(*bodyCopy)
+ data, err := json.Marshal(dsBody)
if err != nil {
logger.Error("failed to form a msg", "error", err)
return nil, err
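DeepSeekerChat.FormMsg now builds a separate ChatBody copy so the in-memory chat keeps its configured role names while the payload sent to DeepSeek uses the literal "user" role, and it marshals the result of models.NewDSCharReq instead of the original chatBody. A self-contained sketch of that copy-and-normalize step, using local stand-ins for models.RoleMsg and models.ChatBody and omitting the commit's extra i == 1 override:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Local stand-ins for models.RoleMsg and models.ChatBody.
    type RoleMsg struct {
        Role    string `json:"role"`
        Content string `json:"content"`
    }

    type ChatBody struct {
        Messages []RoleMsg `json:"messages"`
        Model    string    `json:"model"`
        Stream   bool      `json:"stream"`
    }

    // normalizeRoles returns a copy of cb whose user-role messages are renamed
    // to the literal "user" expected by the chat endpoint; cb itself is left
    // untouched, which is the point of building a separate body copy.
    func normalizeRoles(cb ChatBody, userRole string) ChatBody {
        out := ChatBody{
            Messages: make([]RoleMsg, len(cb.Messages)),
            Model:    cb.Model,
            Stream:   cb.Stream,
        }
        for i, m := range cb.Messages {
            out.Messages[i] = m
            if m.Role == userRole {
                out.Messages[i].Role = "user"
            }
        }
        return out
    }

    func main() {
        cb := ChatBody{
            Messages: []RoleMsg{
                {Role: "system", Content: "keep it brief"},
                {Role: "Alice", Content: "hi there"},
            },
            Model:  "deepseek-chat",
            Stream: true,
        }
        data, _ := json.Marshal(normalizeRoles(cb, "Alice"))
        fmt.Println(string(data))
    }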
diff --git a/models/models.go b/models/models.go
index 574be1c..12311ff 100644
--- a/models/models.go
+++ b/models/models.go
@@ -123,7 +123,7 @@ type DSChatReq struct {
// TopLogprobs any `json:"top_logprobs"`
}
-func NewDSCharReq(cb *ChatBody) DSChatReq {
+func NewDSCharReq(cb ChatBody) DSChatReq {
return DSChatReq{
Messages: cb.Messages,
Model: cb.Model,
@@ -223,6 +223,23 @@ type DSChatResp struct {
} `json:"usage"`
}
+type DSChatStreamResp struct {
+ ID string `json:"id"`
+ Object string `json:"object"`
+ Created int `json:"created"`
+ Model string `json:"model"`
+ SystemFingerprint string `json:"system_fingerprint"`
+ Choices []struct {
+ Index int `json:"index"`
+ Delta struct {
+ Content string `json:"content"`
+ ReasoningContent string `json:"reasoning_content"`
+ } `json:"delta"`
+ Logprobs any `json:"logprobs"`
+ FinishReason string `json:"finish_reason"`
+ } `json:"choices"`
+}
+
type EmbeddingResp struct {
Embedding []float32 `json:"embedding"`
Index uint32 `json:"index"`
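DSChatStreamResp mirrors the shape of DeepSeek's streamed chat completion chunks, where each event carries a delta holding either content or reasoning_content. A small decoding sketch with a trimmed local copy of the struct; the sample chunk is illustrative, not captured from the API.

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Trimmed local copy of the DSChatStreamResp fields that ParseChunk reads.
    type dsChatStreamResp struct {
        Choices []struct {
            Delta struct {
                Content          string `json:"content"`
                ReasoningContent string `json:"reasoning_content"`
            } `json:"delta"`
            FinishReason string `json:"finish_reason"`
        } `json:"choices"`
    }

    func main() {
        chunk := []byte(`{"choices":[{"delta":{"content":"","reasoning_content":"thinking..."},"finish_reason":""}]}`)
        var resp dsChatStreamResp
        if err := json.Unmarshal(chunk, &resp); err != nil {
            panic(err)
        }
        c := resp.Choices[0]
        // Prefer reasoning_content while content is still empty, matching the
        // branch added to DeepSeekerChat.ParseChunk.
        if c.Delta.Content == "" && c.Delta.ReasoningContent != "" {
            fmt.Println("reasoning:", c.Delta.ReasoningContent)
            return
        }
        fmt.Println("content:", c.Delta.Content, "done:", c.FinishReason != "")
    }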
diff --git a/pngmeta/altwriter.go b/pngmeta/altwriter.go
index 5832c78..b031ba7 100644
--- a/pngmeta/altwriter.go
+++ b/pngmeta/altwriter.go
@@ -24,39 +24,32 @@ func WriteToPng(metadata *models.CharCardSpec, sourcePath, outfile string) error
if err != nil {
return err
}
-
jsonData, err := json.Marshal(metadata)
if err != nil {
return err
}
-
base64Data := base64.StdEncoding.EncodeToString(jsonData)
embedData := PngEmbed{
Key: "elefant", // Replace with appropriate key constant
Value: base64Data,
}
-
var outputBuffer bytes.Buffer
if _, err := outputBuffer.Write([]byte(pngHeader)); err != nil {
return err
}
-
chunks, iend, err := processChunks(pngData[8:])
if err != nil {
return err
}
-
for _, chunk := range chunks {
outputBuffer.Write(chunk)
}
-
newChunk, err := createTextChunk(embedData)
if err != nil {
return err
}
outputBuffer.Write(newChunk)
outputBuffer.Write(iend)
-
return os.WriteFile(outfile, outputBuffer.Bytes(), 0666)
}
@@ -67,7 +60,6 @@ func processChunks(data []byte) ([][]byte, []byte, error) {
iendChunk []byte
reader = bytes.NewReader(data)
)
-
for {
var chunkLength uint32
if err := binary.Read(reader, binary.BigEndian, &chunkLength); err != nil {
@@ -76,22 +68,18 @@ func processChunks(data []byte) ([][]byte, []byte, error) {
}
return nil, nil, fmt.Errorf("error reading chunk length: %w", err)
}
-
chunkType := make([]byte, 4)
if _, err := reader.Read(chunkType); err != nil {
return nil, nil, fmt.Errorf("error reading chunk type: %w", err)
}
-
chunkData := make([]byte, chunkLength)
if _, err := reader.Read(chunkData); err != nil {
return nil, nil, fmt.Errorf("error reading chunk data: %w", err)
}
-
crc := make([]byte, 4)
if _, err := reader.Read(crc); err != nil {
return nil, nil, fmt.Errorf("error reading CRC: %w", err)
}
-
fullChunk := bytes.NewBuffer(nil)
if err := binary.Write(fullChunk, binary.BigEndian, chunkLength); err != nil {
return nil, nil, fmt.Errorf("error writing chunk length: %w", err)
@@ -105,7 +93,6 @@ func processChunks(data []byte) ([][]byte, []byte, error) {
if _, err := fullChunk.Write(crc); err != nil {
return nil, nil, fmt.Errorf("error writing CRC: %w", err)
}
-
switch string(chunkType) {
case "IEND":
iendChunk = fullChunk.Bytes()
@@ -116,7 +103,6 @@ func processChunks(data []byte) ([][]byte, []byte, error) {
chunks = append(chunks, fullChunk.Bytes())
}
}
-
return nil, nil, errors.New("IEND chunk not found")
}
@@ -126,12 +112,10 @@ func createTextChunk(embed PngEmbed) ([]byte, error) {
content.WriteString(embed.Key)
content.WriteByte(0) // Null separator
content.WriteString(embed.Value)
-
data := content.Bytes()
crc := crc32.NewIEEE()
crc.Write([]byte(textChunkType))
crc.Write(data)
-
chunk := bytes.NewBuffer(nil)
if err := binary.Write(chunk, binary.BigEndian, uint32(len(data))); err != nil {
return nil, fmt.Errorf("error writing chunk length: %w", err)
@@ -145,6 +129,5 @@ func createTextChunk(embed PngEmbed) ([]byte, error) {
if err := binary.Write(chunk, binary.BigEndian, crc.Sum32()); err != nil {
return nil, fmt.Errorf("error writing CRC: %w", err)
}
-
return chunk.Bytes(), nil
}
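The altwriter.go changes are whitespace-only, but the surrounding code is what assembles the PNG tEXt chunk: a 4-byte big-endian length, the ASCII type "tEXt", a keyword, a null separator, the base64 payload, and a CRC32 computed over type plus payload. A self-contained sketch of that layout, kept separate from the package's own createTextChunk helper; the key and base64 value are illustrative.

    package main

    import (
        "bytes"
        "encoding/binary"
        "fmt"
        "hash/crc32"
    )

    // buildTextChunk assembles a PNG tEXt chunk: 4-byte big-endian length,
    // the "tEXt" type, keyword + null separator + text, and a CRC32 computed
    // over the type and the payload.
    func buildTextChunk(key, value string) ([]byte, error) {
        payload := append(append([]byte(key), 0), []byte(value)...)

        crc := crc32.NewIEEE()
        crc.Write([]byte("tEXt"))
        crc.Write(payload)

        var chunk bytes.Buffer
        if err := binary.Write(&chunk, binary.BigEndian, uint32(len(payload))); err != nil {
            return nil, err
        }
        chunk.WriteString("tEXt")
        chunk.Write(payload)
        if err := binary.Write(&chunk, binary.BigEndian, crc.Sum32()); err != nil {
            return nil, err
        }
        return chunk.Bytes(), nil
    }

    func main() {
        chunk, err := buildTextChunk("elefant", "eyJmYWtlIjoiY2FyZCJ9")
        if err != nil {
            panic(err)
        }
        fmt.Printf("length field + type: % x\n", chunk[:8])
    }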