summaryrefslogtreecommitdiff
path: root/bot.go
diff options
context:
space:
mode:
authorGrail Finder <wohilas@gmail.com>2026-03-01 08:22:02 +0300
committerGrail Finder <wohilas@gmail.com>2026-03-01 08:22:02 +0300
commitcdfccf9a2440dc4d8094e7ae94aa85cb446e7cfb (patch)
tree43658a89dd2fbd89106cf58c6a4a48792f4f97ac /bot.go
parent1f112259d2ff58bbe25f25f8c65694d5a7569e68 (diff)
Enhancement (llama.cpp): show loaded model on startup
Diffstat (limited to 'bot.go')
-rw-r--r--bot.go28
1 file changed, 22 insertions, 6 deletions
diff --git a/bot.go b/bot.go
index d35b4dd..53b3a84 100644
--- a/bot.go
+++ b/bot.go
@@ -379,22 +379,22 @@ func fetchLCPModels() ([]string, error) {
// fetchLCPModelsWithLoadStatus returns models with "(loaded)" indicator for loaded models
func fetchLCPModelsWithLoadStatus() ([]string, error) {
- models, err := fetchLCPModelsWithStatus()
+ modelList, err := fetchLCPModelsWithStatus()
if err != nil {
return nil, err
}
- result := make([]string, 0, len(models.Data))
+ result := make([]string, 0, len(modelList.Data))
li := 0 // loaded index
- for i, m := range models.Data {
+ for i, m := range modelList.Data {
modelName := m.ID
if m.Status.Value == "loaded" {
- modelName = "(loaded) " + modelName
+ modelName = models.LoadedMark + modelName
li = i
}
result = append(result, modelName)
}
if li == 0 {
- return result, nil // no loaded models
+ return result, nil // no loaded modelList
}
loadedModel := result[li]
result = append(result[:li], result[li+1:]...)
@@ -1323,11 +1323,27 @@ func updateModelLists() {
}
// if llama.cpp started after gf-lt?
localModelsMu.Lock()
- LocalModels, err = fetchLCPModels()
+ LocalModels, err = fetchLCPModelsWithLoadStatus()
localModelsMu.Unlock()
if err != nil {
logger.Warn("failed to fetch llama.cpp models", "error", err)
}
+ // set already loaded model in llama.cpp
+ if strings.Contains(cfg.CurrentAPI, "localhost") || strings.Contains(cfg.CurrentAPI, "127.0.0.1") {
+ localModelsMu.Lock()
+ defer localModelsMu.Unlock()
+ for i := range LocalModels {
+ if strings.Contains(LocalModels[i], models.LoadedMark) {
+ m := strings.TrimPrefix(LocalModels[i], models.LoadedMark)
+ cfg.CurrentModel = m
+ chatBody.Model = m
+ cachedModelColor = "green"
+ updateStatusLine()
+ app.Draw()
+ return
+ }
+ }
+ }
}
func refreshLocalModelsIfEmpty() {