-rw-r--r--   README.md                        |   9
-rw-r--r--   bot.go                           |  31
-rw-r--r--   config.example.toml              |   1
-rw-r--r--   config/config.go                 |   2
-rw-r--r--   models/card.go                   |  41
-rw-r--r--   models/db.go                     |   6
-rw-r--r--   models/models.go                 |  22
-rw-r--r--   pngmeta/metareader.go            | 107
-rw-r--r--   pngmeta/metareader_test.go       |  33
-rw-r--r--   pngmeta/partsreader.go           |  77
-rw-r--r--   session.go                       |   8
-rw-r--r--   sysprompts/default_Seraphina.png | bin 0 -> 551901 bytes
-rw-r--r--   sysprompts/llama.png             | bin 0 -> 620775 bytes
-rw-r--r--   tools.go                         |  18
-rw-r--r--   tui.go                           |  28

15 files changed, 346 insertions(+), 37 deletions(-)
diff --git a/README.md b/README.md
--- a/README.md
+++ b/README.md
@@ -18,12 +18,16 @@
 - sqlite for the bot memory;
 - rename current chat;
 - help page with all key bindings;
-- change temp, min-p and other params from tui;
 - default config file (api url, path to sysprompts, path to log, limits, etc);
+- change temp, min-p and other params from tui;
 - fullscreen textarea option (bothersome to implement);
 - consider adding use of /completion of llamacpp, since the openai endpoint clearly has template|format issues;
 - export whole chat into a json file;
-- directory with sys prompts;
+- directory with sys prompts (charcards png & json);
+- separate messages that are stored in chat and sent to the bot, i.e. an option to omit tool calls (there might be a point where they are no longer needed in ctx);
+- colourschemes, colours or markdown of quotes and styles;
+- RAG support|implementation;
+- change card-chat pair with one binding;

 ### FIX:
 - bot responding (or hanging) blocks everything;
@@ -38,3 +42,4 @@
 - let's say we have two (or more) agents with the same name across multiple chats. These agents go and ask the db for topics they memorised. Now they can access topics that aren't meant for them. (so memory should have an option: shareable; that indicates if that memory can be shared across chats);
 - if option to show sys msg enabled: it should display new tool responses;
 - when bot generation ended with err: need a way to switch back to the bot_resp_false mode;
+- no selection focus on modal sys buttons after opening it a second time;
diff --git a/bot.go b/bot.go
--- a/bot.go
+++ b/bot.go
@@ -28,7 +28,7 @@ var (
     chatBody            *models.ChatBody
     store               storage.FullRepo
     defaultFirstMsg     = "Hello! What can I do for you?"
-    defaultStarter      = []models.MessagesStory{}
+    defaultStarter      = []models.RoleMsg{}
     defaultStarterBytes = []byte{}
     interruptResp       = false
 )
@@ -37,7 +37,7 @@ var (

 func formMsg(chatBody *models.ChatBody, newMsg, role string) io.Reader {
     if newMsg != "" { // otherwise let the bot continue
-        newMsg := models.MessagesStory{Role: role, Content: newMsg}
+        newMsg := models.RoleMsg{Role: role, Content: newMsg}
         chatBody.Messages = append(chatBody.Messages, newMsg)
     }
     data, err := json.Marshal(chatBody)
@@ -128,7 +128,7 @@ out:
         }
     }
     botRespMode = false
-    chatBody.Messages = append(chatBody.Messages, models.MessagesStory{
+    chatBody.Messages = append(chatBody.Messages, models.RoleMsg{
         Role: cfg.AssistantRole, Content: respText.String(),
     })
     // bot msg is done;
@@ -182,8 +182,17 @@ func chatToText(showSys bool) string {
     return strings.Join(s, "")
 }

-// func textToMsg(rawMsg string) models.MessagesStory {
-//     msg := models.MessagesStory{}
+func applyCharCard(cc *models.CharCard) {
+    cfg.AssistantRole = cc.Role
+    newChat := []models.RoleMsg{
+        {Role: "system", Content: cc.SysPrompt},
+        {Role: cfg.AssistantRole, Content: cc.FirstMsg},
+    }
+    chatBody.Messages = newChat
+}
+
+// func textToMsg(rawMsg string) models.RoleMsg {
+//     msg := models.RoleMsg{}
 //     // system and tool?
 //     if strings.HasPrefix(rawMsg, cfg.AssistantIcon) {
 //         msg.Role = cfg.AssistantRole
@@ -198,8 +207,8 @@ func chatToText(showSys bool) string {
 //     return msg
 // }

-// func textSliceToChat(chat []string) []models.MessagesStory {
-//     resp := make([]models.MessagesStory, len(chat))
+// func textSliceToChat(chat []string) []models.RoleMsg {
+//     resp := make([]models.RoleMsg, len(chat))
 //     for i, rawMsg := range chat {
 //         msg := textToMsg(rawMsg)
 //         resp[i] = msg
@@ -209,8 +218,8 @@

 func init() {
     cfg = config.LoadConfigOrDefault("config.example.toml")
-    defaultStarter = []models.MessagesStory{
-        {Role: "system", Content: systemMsg},
+    defaultStarter = []models.RoleMsg{
+        {Role: "system", Content: basicSysMsg},
         {Role: cfg.AssistantRole, Content: defaultFirstMsg},
     }
     file, err := os.OpenFile(cfg.LogFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
@@ -223,6 +232,10 @@ func init() {
         logger.Error("failed to marshal defaultStarter", "error", err)
         return
     }
+    // load cards
+    basicCard.Role = cfg.AssistantRole
+    toolCard.Role = cfg.AssistantRole
+
     // logger = slog.New(slog.NewTextHandler(file, nil))
     store = storage.NewProviderSQL("test.db", logger)
     // https://github.com/coreydaley/ggerganov-llama.cpp/blob/master/examples/server/README.md
diff --git a/config.example.toml b/config.example.toml
index d1388e5..888de1a 100644
--- a/config.example.toml
+++ b/config.example.toml
@@ -7,3 +7,4 @@ AssistantRole = "assistant"
 AssistantIcon = "<🤖>: "
 UserIcon = "<user>: "
 ToolIcon = "<ï‚>>: "
+SysDir = "sysprompts"
diff --git a/config/config.go b/config/config.go
index 7b6cfc0..0c62e76 100644
--- a/config/config.go
+++ b/config/config.go
@@ -17,6 +17,7 @@ type Config struct {
     UserIcon   string `toml:"UserIcon"`
     ToolIcon   string `toml:"ToolIcon"`
     ChunkLimit uint32 `toml:"ChunkLimit"`
+    SysDir     string `toml:"SysDir"`
 }

 func LoadConfigOrDefault(fn string) *Config {
@@ -34,6 +35,7 @@ func LoadConfigOrDefault(fn string) *Config {
         config.ToolRole = "tool"
         config.AssistantRole = "assistant"
         config.ChunkLimit = 8192
+        config.SysDir = "sysprompts"
     }
     return config
 }
diff --git a/models/card.go b/models/card.go
new file mode 100644
index 0000000..24226d3
--- /dev/null
+++ b/models/card.go
@@ -0,0 +1,41 @@
+package models
+
+import "strings"
+
+// https://github.com/malfoyslastname/character-card-spec-v2/blob/main/spec_v2.md
+// what a bloat; trim to Role->Msg pair and first msg
+type CharCardSpec struct {
+    Name           string `json:"name"`
+    Description    string `json:"description"`
+    Personality    string `json:"personality"`
+    FirstMes       string `json:"first_mes"`
+    Avatar         string `json:"avatar"`
+    Chat           string `json:"chat"`
+    MesExample     string `json:"mes_example"`
+    Scenario       string `json:"scenario"`
+    CreateDate     string `json:"create_date"`
+    Talkativeness  string `json:"talkativeness"`
+    Fav            bool   `json:"fav"`
+    Creatorcomment string `json:"creatorcomment"`
+    Spec           string `json:"spec"`
+    SpecVersion    string `json:"spec_version"`
+    Tags           []any  `json:"tags"`
+}
+
+func (c *CharCardSpec) Simplify(userName, fpath string) *CharCard {
+    fm := strings.ReplaceAll(strings.ReplaceAll(c.FirstMes, "{{char}}", c.Name), "{{user}}", userName)
+    sysPr := strings.ReplaceAll(strings.ReplaceAll(c.Description, "{{char}}", c.Name), "{{user}}", userName)
+    return &CharCard{
+        SysPrompt: sysPr,
+        FirstMsg:  fm,
+        Role:      c.Name,
+        FilePath:  fpath,
+    }
+}
+
+type CharCard struct {
+    SysPrompt string
+    FirstMsg  string
+    Role      string
+    FilePath  string
+}
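
As a quick illustration of the Simplify flow above (the card field values here are invented; only the {{char}}/{{user}} substitution and the field mapping mirror the code in card.go):

    spec := &models.CharCardSpec{
        Name:        "Seraphina",
        Description: "{{char}} guards the forest and answers {{user}}'s questions.",
        FirstMes:    "Greetings, {{user}}. I am {{char}}.",
    }
    card := spec.Simplify("Adam", "sysprompts/default_Seraphina.png")
    // card.Role      == "Seraphina"
    // card.SysPrompt == "Seraphina guards the forest and answers Adam's questions."
    // card.FirstMsg  == "Greetings, Adam. I am Seraphina."
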
diff --git a/models/db.go b/models/db.go
index 5f49003..4f52f68 100644
--- a/models/db.go
+++ b/models/db.go
@@ -8,13 +8,13 @@ import (
 type Chat struct {
     ID        uint32    `db:"id" json:"id"`
     Name      string    `db:"name" json:"name"`
-    Msgs      string    `db:"msgs" json:"msgs"` // []MessagesStory to string json
+    Msgs      string    `db:"msgs" json:"msgs"` // []RoleMsg to string json
     CreatedAt time.Time `db:"created_at" json:"created_at"`
     UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
 }

-func (c Chat) ToHistory() ([]MessagesStory, error) {
-    resp := []MessagesStory{}
+func (c Chat) ToHistory() ([]RoleMsg, error) {
+    resp := []RoleMsg{}
     if err := json.Unmarshal([]byte(c.Msgs), &resp); err != nil {
         return nil, err
     }
diff --git a/models/models.go b/models/models.go
index 02bec00..0373e9b 100644
--- a/models/models.go
+++ b/models/models.go
@@ -5,12 +5,6 @@ import (
     "strings"
 )
-// type FuncCall struct {
-//     XMLName xml.Name `xml:"tool_call"`
-//     Name    string   `xml:"name"`
-//     Args    []string `xml:"args"`
-// }
-
 type FuncCall struct {
     Name string   `json:"name"`
     Args []string `json:"args"`
 }
@@ -56,12 +50,12 @@ type LLMRespChunk struct {
     } `json:"usage"`
 }

-type MessagesStory struct {
+type RoleMsg struct {
     Role    string `json:"role"`
     Content string `json:"content"`
 }

-func (m MessagesStory) ToText(i int) string {
+func (m RoleMsg) ToText(i int) string {
     icon := ""
     switch m.Role {
     case "assistant":
@@ -72,20 +66,22 @@ func (m MessagesStory) ToText(i int) string {
         icon = fmt.Sprintf("(%d) <system>: ", i)
     case "tool":
         icon = fmt.Sprintf("(%d) <tool>: ", i)
+    default:
+        icon = fmt.Sprintf("(%d) <%s>: ", i, m.Role)
     }
     textMsg := fmt.Sprintf("%s%s\n", icon, m.Content)
     return strings.ReplaceAll(textMsg, "\n\n", "\n")
 }

 type ChatBody struct {
-    Model    string          `json:"model"`
-    Stream   bool            `json:"stream"`
-    Messages []MessagesStory `json:"messages"`
+    Model    string    `json:"model"`
+    Stream   bool      `json:"stream"`
+    Messages []RoleMsg `json:"messages"`
 }

 type ChatToolsBody struct {
-    Model    string          `json:"model"`
-    Messages []MessagesStory `json:"messages"`
+    Model    string    `json:"model"`
+    Messages []RoleMsg `json:"messages"`
     Tools    []struct {
         Type     string `json:"type"`
         Function struct {
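
The new default branch in ToText is what lets a character card's role (anything other than assistant/user/system/tool) show up under its own label instead of no icon at all. A tiny sketch with an illustrative role name:

    msg := models.RoleMsg{Role: "Seraphina", Content: "Hello."}
    fmt.Print(msg.ToText(2)) // prints: (2) <Seraphina>: Hello.
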
diff --git a/pngmeta/metareader.go b/pngmeta/metareader.go
new file mode 100644
index 0000000..ea726c9
--- /dev/null
+++ b/pngmeta/metareader.go
@@ -0,0 +1,107 @@
+package pngmeta
+
+import (
+    "bytes"
+    "elefant/models"
+    "encoding/base64"
+    "encoding/json"
+    "errors"
+    "io"
+    "os"
+    "path"
+    "strings"
+)
+
+const (
+    embType = "tEXt"
+)
+
+type PngEmbed struct {
+    Key   string
+    Value string
+}
+
+func (c PngEmbed) GetDecodedValue() (*models.CharCardSpec, error) {
+    data, err := base64.StdEncoding.DecodeString(c.Value)
+    if err != nil {
+        return nil, err
+    }
+    card := &models.CharCardSpec{}
+    if err := json.Unmarshal(data, &card); err != nil {
+        return nil, err
+    }
+    return card, nil
+}
+
+func extractChar(fname string) (*PngEmbed, error) {
+    data, err := os.ReadFile(fname)
+    if err != nil {
+        return nil, err
+    }
+    reader := bytes.NewReader(data)
+    pr, err := NewPNGStepReader(reader)
+    if err != nil {
+        return nil, err
+    }
+    for {
+        step, err := pr.Next()
+        if err != nil {
+            if errors.Is(err, io.EOF) {
+                break
+            }
+        }
+        if step.Type() != embType {
+            if _, err := io.Copy(io.Discard, step); err != nil {
+                return nil, err
+            }
+        } else {
+            buf, err := io.ReadAll(step)
+            if err != nil {
+                return nil, err
+            }
+            dataInstep := string(buf)
+            values := strings.Split(dataInstep, "\x00")
+            if len(values) == 2 {
+                return &PngEmbed{Key: values[0], Value: values[1]}, nil
+            }
+        }
+        if err := step.Close(); err != nil {
+            return nil, err
+        }
+    }
+    return nil, errors.New("failed to find embedded char in png: " + fname)
+}
+
+func ReadCard(fname, uname string) (*models.CharCard, error) {
+    pe, err := extractChar(fname)
+    if err != nil {
+        return nil, err
+    }
+    charSpec, err := pe.GetDecodedValue()
+    if err != nil {
+        return nil, err
+    }
+    return charSpec.Simplify(uname, fname), nil
+}
+
+func ReadDirCards(dirname, uname string) ([]*models.CharCard, error) {
+    files, err := os.ReadDir(dirname)
+    if err != nil {
+        return nil, err
+    }
+    resp := []*models.CharCard{}
+    for _, f := range files {
+        if !strings.HasSuffix(f.Name(), ".png") {
+            continue
+        }
+        fpath := path.Join(dirname, f.Name())
+        cc, err := ReadCard(fpath, uname)
+        if err != nil {
+            // log err
+            return nil, err
+            // continue
+        }
+        resp = append(resp, cc)
+    }
+    return resp, nil
+}
diff --git a/pngmeta/metareader_test.go b/pngmeta/metareader_test.go
new file mode 100644
index 0000000..51dadc6
--- /dev/null
+++ b/pngmeta/metareader_test.go
@@ -0,0 +1,33 @@
+package pngmeta
+
+import (
+    "fmt"
+    "testing"
+)
+
+func TestReadMeta(t *testing.T) {
+    cases := []struct {
+        Filename string
+    }{
+        {
+            Filename: "../sysprompts/default_Seraphina.png",
+        },
+        {
+            Filename: "../sysprompts/llama.png",
+        },
+    }
+    for i, tc := range cases {
+        t.Run(fmt.Sprintf("test_%d", i), func(t *testing.T) {
+            // Call the readMeta function
+            pembed, err := extractChar(tc.Filename)
+            if err != nil {
+                t.Errorf("Expected no error, but got %v", err)
+            }
+            v, err := pembed.GetDecodedValue()
+            if err != nil {
+                t.Errorf("Expected no error, but got %v\n", err)
+            }
+            fmt.Printf("%+v\n", v.Simplify("Adam", tc.Filename))
+        })
+    }
+}
diff --git a/pngmeta/partsreader.go b/pngmeta/partsreader.go
new file mode 100644
index 0000000..b69e4c3
--- /dev/null
+++ b/pngmeta/partsreader.go
@@ -0,0 +1,77 @@
+package pngmeta
+
+import (
+    "encoding/binary"
+    "errors"
+    "hash"
+    "hash/crc32"
+    "io"
+)
+
+var (
+    ErrCRC32Mismatch = errors.New("crc32 mismatch")
+    ErrNotPNG        = errors.New("not png")
+    ErrBadLength     = errors.New("bad length")
+)
+
+const header = "\x89PNG\r\n\x1a\n"
+
+type PngChunk struct {
+    typ         string
+    length      int32
+    r           io.Reader
+    realR       io.Reader
+    checksummer hash.Hash32
+}
+
+func (c *PngChunk) Read(p []byte) (int, error) {
+    return io.TeeReader(c.r, c.checksummer).Read(p)
+}
+
+func (c *PngChunk) Close() error {
+    var crc32 uint32
+    if err := binary.Read(c.realR, binary.BigEndian, &crc32); err != nil {
+        return err
+    }
+    if crc32 != c.checksummer.Sum32() {
+        return ErrCRC32Mismatch
+    }
+    return nil
+}
+
+func (c *PngChunk) Type() string {
+    return c.typ
+}
+
+type Reader struct {
+    r io.Reader
+}
+
+func NewPNGStepReader(r io.Reader) (*Reader, error) {
+    expectedHeader := make([]byte, len(header))
+    if _, err := io.ReadFull(r, expectedHeader); err != nil {
+        return nil, err
+    }
+    if string(expectedHeader) != header {
+        return nil, ErrNotPNG
+    }
+    return &Reader{r}, nil
+}
+
+func (r *Reader) Next() (*PngChunk, error) {
+    var length int32
+    if err := binary.Read(r.r, binary.BigEndian, &length); err != nil {
+        return nil, err
+    }
+    if length < 0 {
+        return nil, ErrBadLength
+    }
+    var rawTyp [4]byte
+    if _, err := io.ReadFull(r.r, rawTyp[:]); err != nil {
+        return nil, err
+    }
+    typ := string(rawTyp[:])
+    checksummer := crc32.NewIEEE()
+    checksummer.Write([]byte(typ))
+    return &PngChunk{typ, length, io.LimitReader(r.r, int64(length)), r.r, checksummer}, nil
+}
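
Taken together, partsreader.go walks the standard PNG chunk layout (4-byte big-endian length, 4-byte type, payload, then a CRC32 computed over type+payload), and metareader.go looks for a tEXt chunk whose "key\0value" pair carries the base64-encoded card JSON. A rough usage sketch, assuming the sysprompts directory and the user name used in the test above:

    cards, err := pngmeta.ReadDirCards("sysprompts", "Adam")
    if err != nil {
        log.Fatal(err)
    }
    for _, cc := range cards {
        fmt.Printf("%s -> %q\n", cc.Role, cc.FirstMsg)
    }
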
diff --git a/session.go b/session.go
--- a/session.go
+++ b/session.go
@@ -14,7 +14,7 @@ var (
     chatMap = make(map[string]*models.Chat)
 )

-func historyToSJSON(msgs []models.MessagesStory) (string, error) {
+func historyToSJSON(msgs []models.RoleMsg) (string, error) {
     data, err := json.Marshal(msgs)
     if err != nil {
         return "", err
@@ -25,7 +25,7 @@ func historyToSJSON(msgs []models.MessagesStory) (string, error) {
     return string(data), nil
 }

-func updateStorageChat(name string, msgs []models.MessagesStory) error {
+func updateStorageChat(name string, msgs []models.RoleMsg) error {
     var err error
     chat, ok := chatMap[name]
     if !ok {
@@ -59,7 +59,7 @@ func loadHistoryChats() ([]string, error) {
     return resp, nil
 }

-func loadHistoryChat(chatName string) ([]models.MessagesStory, error) {
+func loadHistoryChat(chatName string) ([]models.RoleMsg, error) {
     chat, ok := chatMap[chatName]
     if !ok {
         err := errors.New("failed to read chat")
@@ -70,7 +70,7 @@ func loadHistoryChat(chatName string) ([]models.MessagesStory, error) {
     return chat.ToHistory()
 }

-func loadOldChatOrGetNew() []models.MessagesStory {
+func loadOldChatOrGetNew() []models.RoleMsg {
     newChat := &models.Chat{
         ID:        0,
         CreatedAt: time.Now(),
diff --git a/sysprompts/default_Seraphina.png b/sysprompts/default_Seraphina.png
new file mode 100644
index 0000000..14f3c14
--- /dev/null
+++ b/sysprompts/default_Seraphina.png
Binary files differ
diff --git a/sysprompts/llama.png b/sysprompts/llama.png
new file mode 100644
index 0000000..7317300
--- /dev/null
+++ b/sysprompts/llama.png
Binary files differ
diff --git a/tools.go b/tools.go
--- a/tools.go
+++ b/tools.go
@@ -10,8 +10,8 @@ import (

 var (
     // TODO: form that message based on existing funcs
-    basicSysMsg = `Large Language Model that helps user with any of his requests.`
     toolCallRE  = regexp.MustCompile(`__tool_call__\s*([\s\S]*?)__tool_call__`)
+    basicSysMsg = `Large Language Model that helps user with any of his requests.`
     toolSysMsg  = `You're a helpful assistant.
 # Tools
 You can do functions call if needed.
@@ -47,8 +47,20 @@ Tool call is addressed to the tool agent, avoid sending more info than tool call
 When done right, tool call will be delivered to the tool agent. tool agent will respond with the results of the call.
 After that you are free to respond to the user.
 `
-    systemMsg = toolSysMsg
-    sysMap    = map[string]string{"basic_sys": basicSysMsg, "tool_sys": toolSysMsg}
+    basicCard = &models.CharCard{
+        SysPrompt: basicSysMsg,
+        FirstMsg:  defaultFirstMsg,
+        Role:      "",
+        FilePath:  "",
+    }
+    toolCard = &models.CharCard{
+        SysPrompt: toolSysMsg,
+        FirstMsg:  defaultFirstMsg,
+        Role:      "",
+        FilePath:  "",
+    }
+    // sysMap = map[string]string{"basic_sys": basicSysMsg, "tool_sys": toolSysMsg}
+    sysMap    = map[string]*models.CharCard{"basic_sys": basicCard, "tool_sys": toolCard}
     sysLabels = []string{"cancel", "basic_sys", "tool_sys"}
 )
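
For context, toolCallRE above captures whatever the model wraps between two __tool_call__ markers. This diff does not show how the captured payload is parsed, so the JSON body (shaped after models.FuncCall) and the tool name below are assumptions for illustration only, not the project's confirmed format:

    __tool_call__
    {"name": "recall", "args": ["user_preferences"]}
    __tool_call__
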
diff --git a/tui.go b/tui.go
--- a/tui.go
+++ b/tui.go
@@ -2,6 +2,7 @@ package main

 import (
     "elefant/models"
+    "elefant/pngmeta"
     "fmt"
     "strconv"
     "time"
@@ -119,22 +120,27 @@ func init() {
         })
     sysModal = tview.NewModal().
         SetText("Switch sys msg:").
-        AddButtons(sysLabels).
         SetDoneFunc(func(buttonIndex int, buttonLabel string) {
             switch buttonLabel {
             case "cancel":
                 pages.RemovePage("sys")
                 return
             default:
-                sysMsg, ok := sysMap[buttonLabel]
+                cc, ok := sysMap[buttonLabel]
                 if !ok {
                     logger.Warn("no such sys msg", "name", buttonLabel)
                     pages.RemovePage("sys")
                     return
                 }
-                chatBody.Messages[0].Content = sysMsg
+                // to replace it old role in text
+                // oldRole := chatBody.Messages[0].Role
+                // replace every role with char
+                // chatBody.Messages[0].Content = cc.SysPrompt
+                // chatBody.Messages[1].Content = cc.FirstMsg
+                applyCharCard(cc)
                 // replace textview
                 textView.SetText(chatToText(cfg.ShowSys))
+                sysModal.ClearButtons()
                 pages.RemovePage("sys")
             }
         })
@@ -312,6 +318,22 @@ func init() {
             }
             if event.Key() == tcell.KeyCtrlS {
                 // switch sys prompt
+                cards, err := pngmeta.ReadDirCards(cfg.SysDir, cfg.UserRole)
+                if err != nil {
+                    logger.Error("failed to read sys dir", "error", err)
+                    if err := notifyUser("error", "failed to read: "+cfg.SysDir); err != nil {
+                        logger.Debug("failed to notify user", "error", err)
+                    }
+                    return nil
+                }
+                labels := []string{}
+                labels = append(labels, sysLabels...)
+                for _, cc := range cards {
+                    labels = append(labels, cc.Role)
+                    sysMap[cc.Role] = cc
+                }
+                sysModal.AddButtons(labels)
+                // load all chars
                 pages.AddPage("sys", sysModal, true, true)
                 return nil
             }