This commit is contained in:
lpf
2026-02-13 13:50:09 +08:00
parent f88a78ef8b
commit 085c265319
15 changed files with 1485 additions and 179 deletions

View File

@@ -3,18 +3,23 @@ package tools
import (
"context"
"fmt"
"os/exec"
"path/filepath"
"time"
"clawgo/pkg/browser"
)
// BrowserTool drives a chromium-compatible headless browser to take
// screenshots and fetch dynamically rendered page content.
// NOTE(review): this span is rendered from a diff with +/- markers stripped;
// chromePath may belong to the removed version of the struct (the new
// constructor below never sets it) — confirm against the repository.
type BrowserTool struct {
chromePath string
// timeout bounds each browser operation; mirrored into client via SetTimeout.
timeout time.Duration
// client wraps the actual browser process (clawgo/pkg/browser).
client *browser.Browser
}
// NewBrowserTool constructs a BrowserTool backed by a browser.Browser
// client configured with a 30-second default operation timeout.
func NewBrowserTool() *BrowserTool {
	client := browser.New()
	timeout := 30 * time.Second
	client.SetTimeout(timeout)
	return &BrowserTool{
		// Reuse the timeout local instead of re-spelling the constant, so
		// the struct field and the client's timeout cannot drift apart if
		// the default is ever changed.
		timeout: timeout,
		client:  client,
	}
}
@@ -57,35 +62,22 @@ func (t *BrowserTool) Execute(ctx context.Context, args map[string]interface{})
}
// takeScreenshot captures the given URL to a timestamped PNG under /tmp and
// returns a human-readable message with the output path.
// NOTE(review): this span is a rendered diff with the +/- markers stripped,
// so the old (exec.CommandContext) and new (t.client) implementations are
// interleaved below; it is NOT valid Go as shown — recover the real body
// from the repository before editing.
func (t *BrowserTool) takeScreenshot(ctx context.Context, url string) (string, error) {
// Simple CLI-based implementation: uses chromium-browser --headless.
outputPath := fmt.Sprintf("/tmp/screenshot_%d.png", time.Now().UnixNano())
// Old implementation: shell out directly to chromium-browser.
cmd := exec.CommandContext(ctx, "chromium-browser",
"--headless",
"--disable-gpu",
"--no-sandbox",
"--screenshot="+outputPath,
url)
if err := cmd.Run(); err != nil {
return "", fmt.Errorf("failed to take screenshot: %w (ensure chromium-browser is installed)", err)
// New implementation: delegate to the shared browser client, failing fast
// when no compatible browser binary is available.
if !t.client.Available() {
return "", fmt.Errorf("failed to take screenshot: no chromium-compatible browser available")
}
return fmt.Sprintf("Screenshot saved to: %s", outputPath), nil
if err := t.client.Screenshot(ctx, url, outputPath); err != nil {
return "", err
}
return fmt.Sprintf("Screenshot saved to: %s", filepath.Clean(outputPath)), nil
}
// fetchDynamicContent returns the rendered DOM of the given URL.
// NOTE(review): this span is a rendered diff with the +/- markers stripped,
// so the old (exec.CommandContext --dump-dom) and new (t.client.Content)
// implementations are interleaved; it is NOT valid Go as shown — recover
// the real body from the repository before editing.
func (t *BrowserTool) fetchDynamicContent(ctx context.Context, url string) (string, error) {
// Simple implementation: dump-dom.
// Old implementation: shell out to chromium-browser --dump-dom.
cmd := exec.CommandContext(ctx, "chromium-browser",
"--headless",
"--disable-gpu",
"--no-sandbox",
"--dump-dom",
url)
output, err := cmd.Output()
if err != nil {
return "", fmt.Errorf("failed to fetch content: %w", err)
// New implementation: delegate to the shared browser client, failing fast
// when no compatible browser binary is available.
if !t.client.Available() {
return "", fmt.Errorf("failed to fetch content: no chromium-compatible browser available")
}
return string(output), nil
return t.client.Content(ctx, url)
}

View File

@@ -6,6 +6,8 @@ import (
"sync"
)
// maxParallelToolCalls caps how many tool invocations may run concurrently;
// used below to size a semaphore channel.
const maxParallelToolCalls = 8
// ParallelTool fans a batch of tool calls out across goroutines, executing
// each through the shared registry.
type ParallelTool struct {
registry *ToolRegistry
}
@@ -61,6 +63,7 @@ func (t *ParallelTool) Execute(ctx context.Context, args map[string]interface{})
results := make(map[string]string)
var mu sync.Mutex
var wg sync.WaitGroup
sem := make(chan struct{}, maxParallelToolCalls)
for i, c := range callsRaw {
call, ok := c.(map[string]interface{})
@@ -78,8 +81,11 @@ func (t *ParallelTool) Execute(ctx context.Context, args map[string]interface{})
wg.Add(1)
go func(id, name string, args map[string]interface{}) {
defer wg.Done()
sem <- struct{}{}
defer func() { <-sem }()
res, err := t.registry.Execute(ctx, name, args)
mu.Lock()
defer mu.Unlock()
if err != nil {

View File

@@ -6,6 +6,8 @@ import (
"sync"
)
// maxParallelFetchCalls caps how many URL fetches may run concurrently;
// used below to size a semaphore channel.
const maxParallelFetchCalls = 8
// ParallelFetchTool fetches multiple URLs concurrently, delegating each
// fetch to the wrapped WebFetchTool.
type ParallelFetchTool struct {
fetcher *WebFetchTool
}
@@ -46,6 +48,7 @@ func (t *ParallelFetchTool) Execute(ctx context.Context, args map[string]interfa
results := make([]string, len(urlsRaw))
var wg sync.WaitGroup
sem := make(chan struct{}, maxParallelFetchCalls)
for i, u := range urlsRaw {
urlStr, ok := u.(string)
@@ -56,6 +59,9 @@ func (t *ParallelFetchTool) Execute(ctx context.Context, args map[string]interfa
wg.Add(1)
go func(index int, url string) {
defer wg.Done()
sem <- struct{}{}
defer func() { <-sem }()
res, err := t.fetcher.Execute(ctx, map[string]interface{}{"url": url})
if err != nil {
results[index] = fmt.Sprintf("Error fetching %s: %v", url, err)

View File

@@ -13,7 +13,8 @@ import (
)
const (
// NOTE(review): the two identical userAgent lines below are the before/after
// sides of a whitespace re-alignment in this diff; only one exists in the
// actual file.
userAgent = "Mozilla/5.0 (compatible; clawgo/1.0)"
userAgent = "Mozilla/5.0 (compatible; clawgo/1.0)"
// maxFetchResponseBytes bounds how much of an HTTP response body is read
// (8 MiB); enforced below via io.LimitReader.
maxFetchResponseBytes = 8 * 1024 * 1024
)
type WebSearchTool struct {
@@ -93,10 +94,14 @@ func (t *WebSearchTool) Execute(ctx context.Context, args map[string]interface{}
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
limitedReader := io.LimitReader(resp.Body, maxFetchResponseBytes+1)
body, err := io.ReadAll(limitedReader)
if err != nil {
return "", fmt.Errorf("failed to read response: %w", err)
}
if len(body) > maxFetchResponseBytes {
return "", fmt.Errorf("response body too large (>%d bytes)", maxFetchResponseBytes)
}
var searchResp struct {
Web struct {