diff options
| -rw-r--r-- | bot.go | 41 | ||||
| -rw-r--r-- | props_table.go | 12 |
2 files changed, 35 insertions, 18 deletions
@@ -826,6 +826,30 @@ func charToStart(agentName string) bool { return true } +func updateModelLists() { + var err error + if cfg.OpenRouterToken != "" { + ORFreeModels, err = fetchORModels(true) + if err != nil { + logger.Warn("failed to fetch or models", "error", err) + } + } + // if llama.cpp started after gf-lt? + LocalModels, err = fetchLCPModels() + if err != nil { + logger.Warn("failed to fetch llama.cpp models", "error", err) + } +} + +func updateModelListsTicker() { + updateModelLists() // run on the start + ticker := time.NewTicker(time.Minute * 1) + for { + <-ticker.C + updateModelLists() + } +} + func init() { var err error cfg, err = config.LoadConfig("config.toml") @@ -878,22 +902,6 @@ func init() { playerOrder = []string{cfg.UserRole, cfg.AssistantRole, cfg.CluedoRole2} cluedoState = extra.CluedoPrepCards(playerOrder) } - if cfg.OpenRouterToken != "" { - go func() { - ORModels, err := fetchORModels(true) - if err != nil { - logger.Error("failed to fetch or models", "error", err) - } else { - ORFreeModels = ORModels - } - }() - } - go func() { - LocalModels, err = fetchLCPModels() - if err != nil { - logger.Error("failed to fetch llama.cpp models", "error", err) - } - }() choseChunkParser() httpClient = createClient(time.Second * 15) if cfg.TTS_ENABLED { @@ -902,4 +910,5 @@ func init() { if cfg.STT_ENABLED { asr = extra.NewSTT(logger, cfg) } + go updateModelListsTicker() } diff --git a/props_table.go b/props_table.go index 7807522..dd359f4 100644 --- a/props_table.go +++ b/props_table.go @@ -4,6 +4,7 @@ import ( "fmt" "slices" "strconv" + "strings" "github.com/gdamore/tcell/v2" "github.com/rivo/tview" @@ -134,9 +135,16 @@ func makePropsTable(props map[string]float32) *tview.Table { addListPopupRow("Select an api", apiLinks, cfg.CurrentAPI, func(option string) { cfg.CurrentAPI = option }) + var modelList []string + // INFO: modelList is chosen based on current api link + if strings.Contains(cfg.CurrentAPI, "api.deepseek.com/") { + modelList = 
[]string{chatBody.Model, "deepseek-chat", "deepseek-reasoner"} + } else if strings.Contains(cfg.CurrentAPI, "openrouter.ai") { + modelList = ORFreeModels + } else { // would match on localhost but what if llama.cpp served non-locally? + modelList = LocalModels + } // Prepare model list dropdown - modelList := []string{chatBody.Model, "deepseek-chat", "deepseek-reasoner"} - modelList = append(modelList, ORFreeModels...) addListPopupRow("Select a model", modelList, chatBody.Model, func(option string) { chatBody.Model = option }) |
