summaryrefslogtreecommitdiff
path: root/helpfuncs.go
diff options
context:
space:
mode:
authorGrail Finder <wohilas@gmail.com>2026-02-20 20:11:52 +0300
committerGrail Finder <wohilas@gmail.com>2026-02-20 20:11:52 +0300
commit61a0ddfdfd266fa533abf6d3bb8f000f1214a2cd (patch)
treea5270491560b209d15afc46c1f01c3dc485857e5 /helpfuncs.go
parent26ab5c59e31f18b3b8bd7b34fed751a3de179056 (diff)
Fix: stop making an HTTP request on each keypress
Diffstat (limited to 'helpfuncs.go')
-rw-r--r--helpfuncs.go62
1 file changed, 46 insertions, 16 deletions
diff --git a/helpfuncs.go b/helpfuncs.go
index 55df7c1..d8c28af 100644
--- a/helpfuncs.go
+++ b/helpfuncs.go
@@ -12,6 +12,7 @@ import (
"path/filepath"
"slices"
"strings"
+ "time"
"unicode"
"math/rand/v2"
@@ -19,6 +20,46 @@ import (
"github.com/rivo/tview"
)
+// Cached model color - updated by background goroutine
+var cachedModelColor string = "orange"
+
+// startModelColorUpdater starts a background goroutine that periodically updates
+// the cached model color. Only runs HTTP requests for local llama.cpp APIs.
+func startModelColorUpdater() {
+ go func() {
+ ticker := time.NewTicker(5 * time.Second)
+ defer ticker.Stop()
+
+ // Initial check
+ updateCachedModelColor()
+
+ for range ticker.C {
+ updateCachedModelColor()
+ }
+ }()
+}
+
+// updateCachedModelColor updates the global cachedModelColor variable
+func updateCachedModelColor() {
+ if !isLocalLlamacpp() {
+ cachedModelColor = "orange"
+ return
+ }
+
+ // Check if model is loaded
+ loaded, err := isModelLoaded(chatBody.Model)
+ if err != nil {
+ // On error, assume not loaded (red)
+ cachedModelColor = "red"
+ return
+ }
+ if loaded {
+ cachedModelColor = "green"
+ } else {
+ cachedModelColor = "red"
+ }
+}
+
func isASCII(s string) bool {
for i := 0; i < len(s); i++ {
if s[i] > unicode.MaxASCII {
@@ -132,8 +173,8 @@ func colorText() {
}
func updateStatusLine() {
- statusLineWidget.SetText(makeStatusLine())
- helpView.SetText(fmt.Sprintf(helpText, makeStatusLine()))
+ status := makeStatusLine()
+ statusLineWidget.SetText(status)
}
func initSysCards() ([]string, error) {
@@ -275,22 +316,11 @@ func isLocalLlamacpp() bool {
return host == "localhost" || host == "127.0.0.1" || host == "::1"
}
-// getModelColor returns the color tag for the model name based on its load status.
+// getModelColor returns the cached color tag for the model name.
+// The cached value is updated by a background goroutine every 5 seconds.
// For non-local models, returns orange. For local llama.cpp models, returns green if loaded, red if not.
func getModelColor() string {
- if !isLocalLlamacpp() {
- return "orange"
- }
- // Check if model is loaded
- loaded, err := isModelLoaded(chatBody.Model)
- if err != nil {
- // On error, assume not loaded (red)
- return "red"
- }
- if loaded {
- return "green"
- }
- return "red"
+ return cachedModelColor
}
func makeStatusLine() string {