summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author Grail Finder <wohilas@gmail.com> 2025-02-16 12:27:21 +0300
committer Grail Finder <wohilas@gmail.com> 2025-02-16 12:27:21 +0300
commit 58d632ed196ba8704cd79a0c9db773beb78ebee6 (patch)
tree 26010a8e6c2d20a4086bfe7defb1e5cd67aed26e
parent c9f5b17f1fbfaa3647702496893ebd1a4204714e (diff)
Fix: tool use message
-rw-r--r-- llm.go 26
-rw-r--r-- main.go 2
2 files changed, 16 insertions, 12 deletions
diff --git a/llm.go b/llm.go
index 1289552..c9bebc9 100644
--- a/llm.go
+++ b/llm.go
@@ -28,8 +28,8 @@ type LlamaCPPeer struct {
type OpenAIer struct {
}
-func (lcp LlamaCPPeer) FormMsg(msg, role string, cont bool) (io.Reader, error) {
- if msg != "" { // otherwise let the bot continue
+func (lcp LlamaCPPeer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+ if msg != "" { // otherwise let the bot continue
newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg)
// if rag
@@ -43,16 +43,19 @@ func (lcp LlamaCPPeer) FormMsg(msg, role string, cont bool) (io.Reader, error) {
chatBody.Messages = append(chatBody.Messages, ragMsg)
}
}
+ if cfg.ToolUse && !resume {
+ // prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
+ // add to chat body
+ chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
+ }
messages := make([]string, len(chatBody.Messages))
for i, m := range chatBody.Messages {
messages[i] = m.ToPrompt()
}
prompt := strings.Join(messages, "\n")
// strings builder?
- if cfg.ToolUse && msg != "" && !cont {
- prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
- }
- if !cont {
+ // if cfg.ToolUse && msg != "" && !resume {
+ if !resume {
botMsgStart := "\n" + cfg.AssistantRole + ":\n"
prompt += botMsgStart
}
@@ -60,6 +63,7 @@ func (lcp LlamaCPPeer) FormMsg(msg, role string, cont bool) (io.Reader, error) {
if cfg.ThinkUse && !cfg.ToolUse {
prompt += "<think>"
}
+ logger.Info("checking prompt for llamacpp", "tool_use", cfg.ToolUse, "msg", msg, "resume", resume, "prompt", prompt)
payload := models.NewLCPReq(prompt, cfg, defaultLCPProps)
data, err := json.Marshal(payload)
if err != nil {
@@ -101,6 +105,11 @@ func (op OpenAIer) ParseChunk(data []byte) (string, bool, error) {
}
func (op OpenAIer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
+ if cfg.ToolUse && !resume {
+ // prompt += "\n" + cfg.ToolRole + ":\n" + toolSysMsg
+ // add to chat body
+ chatBody.Messages = append(chatBody.Messages, models.RoleMsg{Role: cfg.ToolRole, Content: toolSysMsg})
+ }
if msg != "" { // otherwise let the bot continue
newMsg := models.RoleMsg{Role: role, Content: msg}
chatBody.Messages = append(chatBody.Messages, newMsg)
@@ -114,11 +123,6 @@ func (op OpenAIer) FormMsg(msg, role string, resume bool) (io.Reader, error) {
ragMsg := models.RoleMsg{Role: cfg.ToolRole, Content: ragResp}
chatBody.Messages = append(chatBody.Messages, ragMsg)
}
- if cfg.ToolUse {
- toolMsg := models.RoleMsg{Role: cfg.ToolRole,
- Content: toolSysMsg}
- chatBody.Messages = append(chatBody.Messages, toolMsg)
- }
}
data, err := json.Marshal(chatBody)
if err != nil {
diff --git a/main.go b/main.go
index 283f9a2..73275e8 100644
--- a/main.go
+++ b/main.go
@@ -12,7 +12,7 @@ var (
botRespMode = false
editMode = false
selectedIndex = int(-1)
- indexLine = "F12 to show keys help | bot resp mode: %v (F6) | char: %s (ctrl+s) | chat: %s (F1) | RAGEnabled: %v (F11) | toolUseAdviced: %v (ctrl+k) | model: %s (ctrl+l)\nAPI_URL: %s (ctrl+v) | ThinkUse: %v (ctrl+p) | Log Level: %v (ctrl+p)"
+ indexLine = "F12 to show keys help | bot resp mode: [orange:-:b]%v[-:-:-] (F6) | char: [orange:-:b]%s[-:-:-] (ctrl+s) | chat: [orange:-:b]%s[-:-:-] (F1) | RAGEnabled: [orange:-:b]%v[-:-:-] (F11) | toolUseAdviced: [orange:-:b]%v[-:-:-] (ctrl+k) | model: [orange:-:b]%s[-:-:-] (ctrl+l)\nAPI_URL: [orange:-:b]%s[-:-:-] (ctrl+v) | ThinkUse: [orange:-:b]%v[-:-:-] (ctrl+p) | Log Level: [orange:-:b]%v[-:-:-] (ctrl+p)"
focusSwitcher = map[tview.Primitive]tview.Primitive{}
)