-rw-r--r--   bot.go              8
-rw-r--r--   config/config.go    1
-rw-r--r--   llm.go             11
-rw-r--r--   main.go             2
-rw-r--r--   tools.go            2
-rw-r--r--   tui.go             27
6 files changed, 40 insertions, 11 deletions
diff --git a/bot.go b/bot.go
index ccd0e8c..5a880ed 100644
--- a/bot.go
+++ b/bot.go
@@ -39,7 +39,7 @@ var (
chunkParser ChunkParser
defaultLCPProps = map[string]float32{
"temperature": 0.8,
- "dry_multiplier": 0.6,
+ "dry_multiplier": 0.0,
"min_p": 0.05,
"n_predict": -1.0,
}
@@ -108,6 +108,7 @@ func sendMsgToLLM(body io.Reader) {
}
// starts with -> data:
line = line[6:]
+ logger.Info("debugging resp", "line", string(line))
content, stop, err := chunkParser.ParseChunk(line)
if err != nil {
logger.Error("error parsing response body", "error", err, "line", string(line), "url", cfg.CurrentAPI)
@@ -185,6 +186,10 @@ func chatRound(userMsg, role string, tv *tview.TextView, regen bool) {
fmt.Fprintf(tv, "(%d) ", len(chatBody.Messages))
fmt.Fprint(tv, roleToIcon(cfg.AssistantRole))
fmt.Fprint(tv, "\n")
+ if cfg.ThinkUse && !strings.Contains(cfg.CurrentAPI, "v1") {
+ // fmt.Fprint(tv, "<think>")
+ chunkChan <- "<think>"
+ }
}
respText := strings.Builder{}
out:
@@ -201,6 +206,7 @@ out:
}
}
botRespMode = false
+ // how can previous messages be affected?
chatBody.Messages = append(chatBody.Messages, models.RoleMsg{
Role: cfg.AssistantRole, Content: respText.String(),
})
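When ThinkUse is enabled and the current API is not an OpenAI-style v1 endpoint, chatRound now pushes a literal "<think>" chunk into chunkChan so the rendered response matches the "<think>" prefill that FormMsg appends to the /completion prompt (see llm.go below). A minimal sketch of that gating, using a hypothetical helper; chunkChan, cfg.ThinkUse and the "v1" substring check come from the diff:

    // hypothetical helper: prefill only for the raw llamacpp /completion
    // endpoint, never for OpenAI-compatible /v1/... URLs.
    func shouldEchoThink(currentAPI string, thinkUse bool) bool {
        return thinkUse && !strings.Contains(currentAPI, "v1")
    }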
diff --git a/config/config.go b/config/config.go
index ddb0d91..f26a82e 100644
--- a/config/config.go
+++ b/config/config.go
@@ -17,6 +17,7 @@ type Config struct {
UserRole string `toml:"UserRole"`
ToolRole string `toml:"ToolRole"`
ToolUse bool `toml:"ToolUse"`
+ ThinkUse bool `toml:"ThinkUse"`
AssistantRole string `toml:"AssistantRole"`
SysDir string `toml:"SysDir"`
ChunkLimit uint32 `toml:"ChunkLimit"`
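ThinkUse rides along with the other toml-tagged fields, so it can also be set from the config file. A hedged example of decoding it, assuming a BurntSushi/toml-style decoder (the loader actually used by config.go is not shown in this diff):

    // sketch only: decode ThinkUse from TOML text, assuming
    // github.com/BurntSushi/toml; adapt to the project's real loader.
    var c Config
    if _, err := toml.Decode("ThinkUse = true", &c); err != nil {
        log.Fatal(err)
    }
    fmt.Println(c.ThinkUse) // true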
diff --git a/llm.go b/llm.go
index a5f70bf..89dedfc 100644
--- a/llm.go
+++ b/llm.go
@@ -30,9 +30,6 @@ type OpenAIer struct {
func (lcp LlamaCPPeer) FormMsg(msg, role string) (io.Reader, error) {
if msg != "" { // otherwise let the bot continue
- // if role == cfg.UserRole {
- // msg = msg + cfg.AssistantRole + ":"
- // }
newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg)
// if rag
@@ -51,11 +48,17 @@ func (lcp LlamaCPPeer) FormMsg(msg, role string) (io.Reader, error) {
messages[i] = m.ToPrompt()
}
prompt := strings.Join(messages, "\n")
+ // strings builder?
if cfg.ToolUse && msg != "" {
prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
}
botMsgStart := "\n" + cfg.AssistantRole + ":\n"
- payload := models.NewLCPReq(prompt+botMsgStart, cfg, defaultLCPProps)
+ prompt += botMsgStart
+ // if cfg.ThinkUse && msg != "" && !cfg.ToolUse {
+ if cfg.ThinkUse && !cfg.ToolUse {
+ prompt += "<think>"
+ }
+ payload := models.NewLCPReq(prompt, cfg, defaultLCPProps)
data, err := json.Marshal(payload)
if err != nil {
logger.Error("failed to form a msg", "error", err)
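With this change FormMsg always appends the assistant header to the prompt and, when ThinkUse is on and ToolUse is off, an opening "<think>" tag, so the model starts its completion already inside a thinking block. A rough sketch of the assembled prompt, assuming ToPrompt renders each message as "role:\ncontent" (the real format lives in models.RoleMsg); buildPrompt is hypothetical:

    // sketch of the prompt assembly performed by FormMsg above.
    func buildPrompt(messages []string, assistantRole string, thinkUse, toolUse bool) string {
        prompt := strings.Join(messages, "\n")
        prompt += "\n" + assistantRole + ":\n" // botMsgStart
        if thinkUse && !toolUse {
            prompt += "<think>" // prefill: completion continues inside the think block
        }
        return prompt
    }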
diff --git a/main.go b/main.go
index 0b3bae5..e494b3a 100644
--- a/main.go
+++ b/main.go
@@ -12,7 +12,7 @@ var (
botRespMode = false
editMode = false
selectedIndex = int(-1)
- indexLine = "F12 to show keys help | bot resp mode: %v (F6) | char: %s (ctrl+s) | chat: %s (F1) | RAGEnabled: %v (F11) | toolUseAdviced: %v (ctrl+k) | model: %s (ctrl+l)\nAPI_URL: %s (ctrl+v)"
+ indexLine = "F12 to show keys help | bot resp mode: %v (F6) | char: %s (ctrl+s) | chat: %s (F1) | RAGEnabled: %v (F11) | toolUseAdviced: %v (ctrl+k) | model: %s (ctrl+l)\nAPI_URL: %s (ctrl+v) | ThinkUse: %v (ctrl+p)"
focusSwitcher = map[tview.Primitive]tview.Primitive{}
)
diff --git a/tools.go b/tools.go
index 1489294..a380bf5 100644
--- a/tools.go
+++ b/tools.go
@@ -12,7 +12,7 @@ var (
toolCallRE = regexp.MustCompile(`__tool_call__\s*([\s\S]*?)__tool_call__`)
quotesRE = regexp.MustCompile(`(".*?")`)
starRE = regexp.MustCompile(`(\*.*?\*)`)
- thinkRE = regexp.MustCompile(`(<think>.*?</think>)`)
+ thinkRE = regexp.MustCompile(`(<think>\s*([\s\S]*?)</think>)`)
codeBlockRE = regexp.MustCompile(`(?s)\x60{3}(?:.*?)\n(.*?)\n\s*\x60{3}\s*`)
basicSysMsg = `Large Language Model that helps user with any of his requests.`
toolSysMsg = `You can do functions call if needed.
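The old thinkRE only matched think blocks that sat on a single line, because "." in Go's regexp does not match newlines unless the s flag is set; "[\s\S]" matches anything, including newlines, and the added "\s*" tolerates leading whitespace. A quick standalone check of the difference:

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        oldRE := regexp.MustCompile(`(<think>.*?</think>)`)
        newRE := regexp.MustCompile(`(<think>\s*([\s\S]*?)</think>)`)
        s := "<think>\nstep one\nstep two\n</think>"
        fmt.Println(oldRE.MatchString(s)) // false: "." stops at newlines
        fmt.Println(newRE.MatchString(s)) // true
    }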
diff --git a/tui.go b/tui.go
index c8151b4..948e363 100644
--- a/tui.go
+++ b/tui.go
@@ -65,7 +65,7 @@ var (
[yellow]Ctrl+t[white]: remove thinking (<think>) and tool messages from context (delete from chat)
[yellow]Ctrl+l[white]: update connected model name (llamacpp)
[yellow]Ctrl+k[white]: switch tool use (recommend tool use to llm after user msg)
-[yellow]Ctrl+i[white]: if chat agent is char.png will show the image; then any key to return
+[yellow]Ctrl+j[white]: if chat agent is char.png will show the image; then any key to return
Press Enter to go back
`
@@ -97,6 +97,10 @@ func colorText() {
var codeBlocks []string
placeholder := "__CODE_BLOCK_%d__"
counter := 0
+ // thinking
+ var thinkBlocks []string
+ placeholderThink := "__THINK_BLOCK_%d__"
+ counterThink := 0
// Replace code blocks with placeholders and store their styled versions
text = codeBlockRE.ReplaceAllStringFunc(text, func(match string) string {
// Style the code block and store it
@@ -107,19 +111,31 @@ func colorText() {
counter++
return id
})
+ text = thinkRE.ReplaceAllStringFunc(text, func(match string) string {
+ // Style the think block and store it
+ styled := fmt.Sprintf("[red::i]%s[-:-:-]", match)
+ thinkBlocks = append(thinkBlocks, styled)
+ // Generate a unique placeholder (e.g., "__THINK_BLOCK_0__")
+ id := fmt.Sprintf(placeholderThink, counterThink)
+ counterThink++
+ return id
+ })
// Step 2: Apply other regex styles to the non-code parts
text = quotesRE.ReplaceAllString(text, `[orange::-]$1[-:-:-]`)
text = starRE.ReplaceAllString(text, `[turquoise::i]$1[-:-:-]`)
- text = thinkRE.ReplaceAllString(text, `[turquoise::i]$1[-:-:-]`)
+ // text = thinkRE.ReplaceAllString(text, `[yellow::i]$1[-:-:-]`)
// Step 3: Restore the styled code blocks from placeholders
for i, cb := range codeBlocks {
text = strings.Replace(text, fmt.Sprintf(placeholder, i), cb, 1)
}
+ for i, tb := range thinkBlocks {
+ text = strings.Replace(text, fmt.Sprintf(placeholderThink, i), tb, 1)
+ }
textView.SetText(text)
}
func updateStatusLine() {
- position.SetText(fmt.Sprintf(indexLine, botRespMode, cfg.AssistantRole, activeChatName, cfg.RAGEnabled, cfg.ToolUse, currentModel, cfg.CurrentAPI))
+ position.SetText(fmt.Sprintf(indexLine, botRespMode, cfg.AssistantRole, activeChatName, cfg.RAGEnabled, cfg.ToolUse, currentModel, cfg.CurrentAPI, cfg.ThinkUse))
}
func initSysCards() ([]string, error) {
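colorText shields code blocks and think blocks from the quote/star regexes by swapping each one for a unique placeholder, styling the remaining text, and then substituting the styled blocks back in; think blocks get their own slice, placeholder pattern and counter so the two restore loops stay independent. A stripped-down sketch of that protect/style/restore pattern (names and color tags are simplified, not the project's):

    // protectAndStyle is a simplified stand-in for colorText's approach:
    // pull protected spans out, style the rest, then restore the spans.
    func protectAndStyle(text string, spanRE, quoteRE *regexp.Regexp) string {
        var spans []string
        text = spanRE.ReplaceAllStringFunc(text, func(m string) string {
            spans = append(spans, "[red::i]"+m+"[-:-:-]")
            return fmt.Sprintf("__SPAN_%d__", len(spans)-1)
        })
        // safe now: the protected spans are no longer in text
        text = quoteRE.ReplaceAllString(text, `[orange::-]$1[-:-:-]`)
        for i, s := range spans {
            text = strings.Replace(text, fmt.Sprintf("__SPAN_%d__", i), s, 1)
        }
        return text
    }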
@@ -167,6 +183,9 @@ func startNewChat() {
func makePropsForm(props map[string]float32) *tview.Form {
form := tview.NewForm().
AddTextView("Notes", "Props for llamacpp completion call", 40, 2, true, false).
+ AddCheckbox("Insert <think> (/completion only)", cfg.ThinkUse, func(checked bool) {
+ cfg.ThinkUse = checked
+ }).
AddButton("Quit", func() {
pages.RemovePage(propsPage)
})
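The new checkbox writes straight into cfg.ThinkUse, so the next FormMsg/chatRound call picks the value up without a restart. A minimal standalone use of the same tview call, with the callback stubbed out:

    // same tview AddCheckbox(label, checked, changed) call as above,
    // shown in isolation; the print stands in for mutating cfg.ThinkUse.
    form := tview.NewForm().
        AddCheckbox("Insert <think> (/completion only)", false, func(checked bool) {
            fmt.Println("ThinkUse is now", checked)
        })
    _ = form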
@@ -588,7 +607,7 @@ func init() {
updateStatusLine()
return nil
}
- if event.Key() == tcell.KeyCtrlI {
+ if event.Key() == tcell.KeyCtrlJ {
// show image
loadImage()
pages.AddPage(imgPage, imgView, true, true)
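One likely reason the image viewer moved off Ctrl+I: in tcell, Ctrl+I and Tab share the same key code, so the old binding also fired on every Tab press, while Ctrl+J does not collide with Enter (which tcell maps to Ctrl+M). A tiny check:

    package main

    import (
        "fmt"

        "github.com/gdamore/tcell/v2"
    )

    func main() {
        fmt.Println(tcell.KeyTab == tcell.KeyCtrlI)   // true: same code (0x09)
        fmt.Println(tcell.KeyEnter == tcell.KeyCtrlJ) // false: Enter is Ctrl+M (0x0D)
    }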