diff options
| author | Grail Finder <wohilas@gmail.com> | 2026-03-01 08:22:02 +0300 |
|---|---|---|
| committer | Grail Finder <wohilas@gmail.com> | 2026-03-01 08:22:02 +0300 |
| commit | cdfccf9a2440dc4d8094e7ae94aa85cb446e7cfb (patch) | |
| tree | 43658a89dd2fbd89106cf58c6a4a48792f4f97ac | |
| parent | 1f112259d2ff58bbe25f25f8c65694d5a7569e68 (diff) | |
Enhance (llama.cpp): show loaded model on startup
| -rw-r--r-- | bot.go | 28 | ||||
| -rw-r--r-- | helpfuncs.go | 2 | ||||
| -rw-r--r-- | models/consts.go | 12 | ||||
| -rw-r--r-- | models/models.go | 25 | ||||
| -rw-r--r-- | popups.go | 5 | ||||
| -rw-r--r-- | tools.go | 8 |
6 files changed, 37 insertions, 43 deletions
@@ -379,22 +379,22 @@ func fetchLCPModels() ([]string, error) { // fetchLCPModelsWithLoadStatus returns models with "(loaded)" indicator for loaded models func fetchLCPModelsWithLoadStatus() ([]string, error) { - models, err := fetchLCPModelsWithStatus() + modelList, err := fetchLCPModelsWithStatus() if err != nil { return nil, err } - result := make([]string, 0, len(models.Data)) + result := make([]string, 0, len(modelList.Data)) li := 0 // loaded index - for i, m := range models.Data { + for i, m := range modelList.Data { modelName := m.ID if m.Status.Value == "loaded" { - modelName = "(loaded) " + modelName + modelName = models.LoadedMark + modelName li = i } result = append(result, modelName) } if li == 0 { - return result, nil // no loaded models + return result, nil // no loaded modelList } loadedModel := result[li] result = append(result[:li], result[li+1:]...) @@ -1323,11 +1323,27 @@ func updateModelLists() { } // if llama.cpp started after gf-lt? localModelsMu.Lock() - LocalModels, err = fetchLCPModels() + LocalModels, err = fetchLCPModelsWithLoadStatus() localModelsMu.Unlock() if err != nil { logger.Warn("failed to fetch llama.cpp models", "error", err) } + // set already loaded model in llama.cpp + if strings.Contains(cfg.CurrentAPI, "localhost") || strings.Contains(cfg.CurrentAPI, "127.0.0.1") { + localModelsMu.Lock() + defer localModelsMu.Unlock() + for i := range LocalModels { + if strings.Contains(LocalModels[i], models.LoadedMark) { + m := strings.TrimPrefix(LocalModels[i], models.LoadedMark) + cfg.CurrentModel = m + chatBody.Model = m + cachedModelColor = "green" + updateStatusLine() + app.Draw() + return + } + } + } } func refreshLocalModelsIfEmpty() { diff --git a/helpfuncs.go b/helpfuncs.go index b8b7251..b63995c 100644 --- a/helpfuncs.go +++ b/helpfuncs.go @@ -27,7 +27,6 @@ func startModelColorUpdater() { go func() { ticker := time.NewTicker(5 * time.Second) defer ticker.Stop() - // Initial check updateCachedModelColor() for range ticker.C { @@ 
-42,7 +41,6 @@ func updateCachedModelColor() { cachedModelColor = "orange" return } - // Check if model is loaded loaded, err := isModelLoaded(chatBody.Model) if err != nil { diff --git a/models/consts.go b/models/consts.go new file mode 100644 index 0000000..4f61435 --- /dev/null +++ b/models/consts.go @@ -0,0 +1,12 @@ +package models + +const ( + LoadedMark = "(loaded) " +) + +type APIType int + +const ( + APITypeChat APIType = iota + APITypeCompletion +) diff --git a/models/models.go b/models/models.go index 2f4b8b0..a35f16c 100644 --- a/models/models.go +++ b/models/models.go @@ -519,24 +519,6 @@ type OpenAIReq struct { // === -// type LLMModels struct { -// Object string `json:"object"` -// Data []struct { -// ID string `json:"id"` -// Object string `json:"object"` -// Created int `json:"created"` -// OwnedBy string `json:"owned_by"` -// Meta struct { -// VocabType int `json:"vocab_type"` -// NVocab int `json:"n_vocab"` -// NCtxTrain int `json:"n_ctx_train"` -// NEmbd int `json:"n_embd"` -// NParams int64 `json:"n_params"` -// Size int64 `json:"size"` -// } `json:"meta"` -// } `json:"data"` -// } - type LlamaCPPReq struct { Model string `json:"model"` Stream bool `json:"stream"` @@ -641,10 +623,3 @@ type ChatRoundReq struct { Regen bool Resume bool } - -type APIType int - -const ( - APITypeChat APIType = iota - APITypeCompletion -) @@ -1,6 +1,7 @@ package main import ( + "gf-lt/models" "slices" "strings" @@ -51,7 +52,7 @@ func showModelSelectionPopup() { // Find the current model index to set as selected currentModelIndex := -1 for i, model := range modelList { - if strings.TrimPrefix(model, "(loaded) ") == chatBody.Model { + if strings.TrimPrefix(model, models.LoadedMark) == chatBody.Model { currentModelIndex = i } modelListWidget.AddItem(model, "", 0, nil) @@ -61,7 +62,7 @@ func showModelSelectionPopup() { modelListWidget.SetCurrentItem(currentModelIndex) } modelListWidget.SetSelectedFunc(func(index int, mainText string, secondaryText string, shortcut rune) 
{ - modelName := strings.TrimPrefix(mainText, "(loaded) ") + modelName := strings.TrimPrefix(mainText, models.LoadedMark) chatBody.Model = modelName cfg.CurrentModel = chatBody.Model pages.RemovePage("modelSelectionPopup") @@ -519,21 +519,17 @@ func fileEdit(args map[string]string) []byte { return []byte(msg) } path = resolvePath(path) - oldString, ok := args["oldString"] if !ok || oldString == "" { msg := "oldString not provided to file_edit tool" logger.Error(msg) return []byte(msg) } - newString, ok := args["newString"] if !ok { newString = "" } - lineNumberStr, hasLineNumber := args["lineNumber"] - // Read file content content, err := os.ReadFile(path) if err != nil { @@ -541,10 +537,8 @@ func fileEdit(args map[string]string) []byte { logger.Error(msg) return []byte(msg) } - fileContent := string(content) var replacementCount int - if hasLineNumber && lineNumberStr != "" { // Line-number based edit lineNum, err := strconv.Atoi(lineNumberStr) @@ -579,13 +573,11 @@ func fileEdit(args map[string]string) []byte { fileContent = strings.ReplaceAll(fileContent, oldString, newString) replacementCount = strings.Count(fileContent, newString) } - if err := os.WriteFile(path, []byte(fileContent), 0644); err != nil { msg := "failed to write file: " + err.Error() logger.Error(msg) return []byte(msg) } - msg := fmt.Sprintf("file edited successfully at %s (%d replacement(s))", path, replacementCount) return []byte(msg) } |
