author     Grail Finder <wohilas@gmail.com>    2025-10-19 11:16:25 +0300
committer  Grail Finder <wohilas@gmail.com>    2025-10-19 11:16:25 +0300
commit     e7fa9ccf815096bc086258aab2a033b65a3f5b82 (patch)
tree       f2cdf54a4f920f52c8508b1c56276c2689cfc6bb /tools.go
parent     5d2ce7a5f5743fa39b43379b143e0ee9a908ada6 (diff)
Feat: import searchagent for extra websearch
Diffstat (limited to 'tools.go')
 -rw-r--r--  tools.go  63
 1 file changed, 43 insertions(+), 20 deletions(-)
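
The commit drops the hand-rolled HTTP POST to cfg.SearchAPI and delegates the search to a client exported by the new gf-lt/extra package. The diff only shows the call site, so the snippet below is a minimal sketch of what that package is assumed to expose, inferred from the call extra.WebSearcher.Search(context.Background(), query, limit); the Searcher interface name and the SearchResult fields are hypothetical and not taken from the repository.

// Sketch (not part of the commit): the assumed shape of the search client
// that tools.go now depends on. Only the Search call signature is visible
// in the diff; everything else here is an illustrative assumption.
package extra

import "context"

// SearchResult is a hypothetical result record; the real fields come from
// the imported searchagent dependency and are not shown in this diff.
type SearchResult struct {
	Title   string `json:"title"`
	URL     string `json:"url"`
	Snippet string `json:"snippet"`
}

// Searcher matches the call made in tools.go:
//	extra.WebSearcher.Search(context.Background(), query, limit)
type Searcher interface {
	Search(ctx context.Context, query string, limit int) ([]SearchResult, error)
}

// WebSearcher is the package-level client the websearch tool calls into.
var WebSearcher Searcher
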
diff --git a/tools.go b/tools.go
index c1b84ab..9ee0db9 100644
--- a/tools.go
+++ b/tools.go
@@ -1,14 +1,14 @@
package main
import (
- "bytes"
+ "context"
"encoding/json"
"fmt"
"gf-lt/config"
+ "gf-lt/extra"
"gf-lt/models"
- "io"
- "net/http"
"regexp"
+ "strconv"
"strings"
"time"
)
@@ -116,29 +116,52 @@ func websearch(args map[string]string) []byte {
logger.Error(msg)
return []byte(msg)
}
- payload, err := json.Marshal(args)
- if err != nil {
- logger.Error("failed to marshal web_search arguments", "error", err)
- msg := fmt.Sprintf("failed to marshal web_search arguments; error: %s\n", err)
- return []byte(msg)
+ limitS, ok := args["limit"]
+ if !ok || limitS == "" {
+ limitS = "3"
}
- req, err := http.NewRequest("POST", cfg.SearchAPI, bytes.NewReader(payload))
- if err != nil {
- logger.Error("failed to build an http request", "error", err)
- msg := fmt.Sprintf("failed to build an http request; error: %s\n", err)
- return []byte(msg)
+ limit, err := strconv.Atoi(limitS)
+ if err != nil || limit == 0 {
+ logger.Warn("websearch limit; passed bad value; setting to default (3)",
+ "limit_arg", limitS, "error", err)
+ limit = 3
}
- resp, err := httpClient.Do(req)
+ // // external
+ // payload, err := json.Marshal(args)
+ // if err != nil {
+ // logger.Error("failed to marshal web_search arguments", "error", err)
+ // msg := fmt.Sprintf("failed to marshal web_search arguments; error: %s\n", err)
+ // return []byte(msg)
+ // }
+ // req, err := http.NewRequest("POST", cfg.SearchAPI, bytes.NewReader(payload))
+ // if err != nil {
+ // logger.Error("failed to build an http request", "error", err)
+ // msg := fmt.Sprintf("failed to build an http request; error: %s\n", err)
+ // return []byte(msg)
+ // }
+ // resp, err := httpClient.Do(req)
+ // if err != nil {
+ // logger.Error("failed to execute http request", "error", err)
+ // msg := fmt.Sprintf("failed to execute http request; error: %s\n", err)
+ // return []byte(msg)
+ // }
+ // defer resp.Body.Close()
+ // data, err := io.ReadAll(resp.Body)
+ // if err != nil {
+ // logger.Error("failed to read response body", "error", err)
+ // msg := fmt.Sprintf("failed to read response body; error: %s\n", err)
+ // return []byte(msg)
+ // }
+ resp, err := extra.WebSearcher.Search(context.Background(), query, limit)
if err != nil {
- logger.Error("failed to execute http request", "error", err)
- msg := fmt.Sprintf("failed to execute http request; error: %s\n", err)
+ msg := "search tool failed; error: " + err.Error()
+ logger.Error(msg)
return []byte(msg)
}
- defer resp.Body.Close()
- data, err := io.ReadAll(resp.Body)
+ data, err := json.Marshal(resp)
if err != nil {
- logger.Error("failed to read response body", "error", err)
- msg := fmt.Sprintf("failed to read response body; error: %s\n", err)
+ msg := "failed to marshal search result; error: " + err.Error()
+ logger.Error(msg)
return []byte(msg)
}
return data
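
The only new logic the patch adds besides the delegation is the "limit" argument handling: a missing, empty, unparsable, or zero value falls back to 3 before the search client is called. The standalone program below is a sketch that reproduces just that behaviour for illustration; the parseLimit helper name is invented for the example and does not exist in tools.go.

// Minimal, self-contained illustration of the limit-defaulting behaviour
// introduced by the patch (assumption: extracted into a helper for clarity).
package main

import (
	"fmt"
	"strconv"
)

// parseLimit mirrors the new argument handling in websearch: default to 3
// when the "limit" argument is absent, empty, non-numeric, or zero.
func parseLimit(args map[string]string) int {
	limitS, ok := args["limit"]
	if !ok || limitS == "" {
		limitS = "3"
	}
	limit, err := strconv.Atoi(limitS)
	if err != nil || limit == 0 {
		return 3
	}
	return limit
}

func main() {
	for _, args := range []map[string]string{
		{},               // missing  -> 3
		{"limit": ""},    // empty    -> 3
		{"limit": "abc"}, // invalid  -> 3
		{"limit": "0"},   // zero     -> 3
		{"limit": "5"},   // valid    -> 5
	} {
		fmt.Println(parseLimit(args))
	}
}
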