diff options
| author | Grail Finder <wohilas@gmail.com> | 2026-03-07 18:42:12 +0300 |
|---|---|---|
| committer | Grail Finder <wohilas@gmail.com> | 2026-03-07 18:42:12 +0300 |
| commit | bf655a10875630a6fe5f283340b6d390a1920b58 (patch) | |
| tree | 56b363f3638bbe0183bab60423f78843200b8951 /bot.go | |
| parent | c8f00198d6f0ad66269753252f56485ee346d413 (diff) | |
Enha: llama.cpp on non-localhost
Diffstat (limited to 'bot.go')
| -rw-r--r-- | bot.go | 37 |
1 file changed, 16 insertions, 21 deletions
@@ -16,7 +16,6 @@ import ( "log/slog" "net" "net/http" - "net/url" "os" "regexp" "slices" @@ -253,12 +252,7 @@ func createClient(connectTimeout time.Duration) *http.Client { } func warmUpModel() { - u, err := url.Parse(cfg.CurrentAPI) - if err != nil { - return - } - host := u.Hostname() - if host != "localhost" && host != "127.0.0.1" && host != "::1" { + if !isLocalLlamacpp() { return } // Check if model is already loaded @@ -1404,20 +1398,21 @@ func updateModelLists() { time.Sleep(time.Millisecond * 100) } // set already loaded model in llama.cpp - if strings.Contains(cfg.CurrentAPI, "localhost") || strings.Contains(cfg.CurrentAPI, "127.0.0.1") { - localModelsMu.Lock() - defer localModelsMu.Unlock() - for i := range LocalModels { - if strings.Contains(LocalModels[i], models.LoadedMark) { - m := strings.TrimPrefix(LocalModels[i], models.LoadedMark) - cfg.CurrentModel = m - chatBody.Model = m - cachedModelColor = "green" - updateStatusLine() - updateToolCapabilities() - app.Draw() - return - } + if !isLocalLlamacpp() { + return + } + localModelsMu.Lock() + defer localModelsMu.Unlock() + for i := range LocalModels { + if strings.Contains(LocalModels[i], models.LoadedMark) { + m := strings.TrimPrefix(LocalModels[i], models.LoadedMark) + cfg.CurrentModel = m + chatBody.Model = m + cachedModelColor = "green" + updateStatusLine() + updateToolCapabilities() + app.Draw() + return } } } |
