commit 4956b1a014aa7dcb2f10de73416423861cdec243 Author: DBT Date: Thu Feb 12 02:57:27 2026 +0000 Initial commit for ClawGo diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9c7a79d --- /dev/null +++ b/.gitignore @@ -0,0 +1,18 @@ +bin/ +*.exe +*.dll +*.so +*.dylib +*.test +*.out +.clawgo/ +config.json +sessions/ +coverage.txt +coverage.html +.DS_Store +build + +clawgo + +.idea \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..7237f49 --- /dev/null +++ b/LICENSE @@ -0,0 +1,25 @@ +MIT License + +Copyright (c) 2026 ClawGo contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +--- + +ClawGo is heavily inspired by and based on [nanobot](https://github.com/HKUDS/nanobot) by HKUDS. 
diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..977489c --- /dev/null +++ b/Makefile @@ -0,0 +1,179 @@ +.PHONY: all build build-all install install-skills uninstall uninstall-all clean fmt deps run help + +# Build variables +BINARY_NAME=clawgo +BUILD_DIR=build +CMD_DIR=cmd/$(BINARY_NAME) +MAIN_GO=$(CMD_DIR)/main.go + +# Version +VERSION?=$(shell git describe --tags --always --dirty 2>/dev/null || echo "dev") +BUILD_TIME=$(shell date +%FT%T%z) +LDFLAGS=-ldflags "-X main.version=$(VERSION) -X main.buildTime=$(BUILD_TIME)" + +# Go variables +GO?=go +GOFLAGS?=-v + +# Installation +INSTALL_PREFIX?=$(HOME)/.local +INSTALL_BIN_DIR=$(INSTALL_PREFIX)/bin +INSTALL_MAN_DIR=$(INSTALL_PREFIX)/share/man/man1 + +# Workspace and Skills +CLAWGO_HOME?=$(HOME)/.clawgo +WORKSPACE_DIR?=$(CLAWGO_HOME)/workspace +WORKSPACE_SKILLS_DIR=$(WORKSPACE_DIR)/skills +BUILTIN_SKILLS_DIR=$(CURDIR)/skills + +# OS detection +UNAME_S:=$(shell uname -s) +UNAME_M:=$(shell uname -m) + +# Platform-specific settings +ifeq ($(UNAME_S),Linux) + PLATFORM=linux + ifeq ($(UNAME_M),x86_64) + ARCH=amd64 + else ifeq ($(UNAME_M),aarch64) + ARCH=arm64 + else ifeq ($(UNAME_M),riscv64) + ARCH=riscv64 + else + ARCH=$(UNAME_M) + endif +else ifeq ($(UNAME_S),Darwin) + PLATFORM=darwin + ifeq ($(UNAME_M),x86_64) + ARCH=amd64 + else ifeq ($(UNAME_M),arm64) + ARCH=arm64 + else + ARCH=$(UNAME_M) + endif +else + PLATFORM=$(UNAME_S) + ARCH=$(UNAME_M) +endif + +BINARY_PATH=$(BUILD_DIR)/$(BINARY_NAME)-$(PLATFORM)-$(ARCH) + +# Default target +all: build + +## build: Build the clawgo binary for current platform +build: + @echo "Building $(BINARY_NAME) for $(PLATFORM)/$(ARCH)..." + @mkdir -p $(BUILD_DIR) + $(GO) build $(GOFLAGS) $(LDFLAGS) -o $(BINARY_PATH) ./$(CMD_DIR) + @echo "Build complete: $(BINARY_PATH)" + @ln -sf $(BINARY_NAME)-$(PLATFORM)-$(ARCH) $(BUILD_DIR)/$(BINARY_NAME) + +## build-all: Build clawgo for all platforms +build-all: + @echo "Building for multiple platforms..." 
+ @mkdir -p $(BUILD_DIR) + GOOS=linux GOARCH=amd64 $(GO) build $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-linux-amd64 ./$(CMD_DIR) + GOOS=linux GOARCH=arm64 $(GO) build $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-linux-arm64 ./$(CMD_DIR) + GOOS=linux GOARCH=riscv64 $(GO) build $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-linux-riscv64 ./$(CMD_DIR) +# GOOS=darwin GOARCH=amd64 $(GO) build $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-darwin-amd64 ./$(CMD_DIR) + GOOS=windows GOARCH=amd64 $(GO) build $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-windows-amd64.exe ./$(CMD_DIR) + @echo "All builds complete" + +## install: Install clawgo to system and copy builtin skills +install: build + @echo "Installing $(BINARY_NAME)..." + @mkdir -p $(INSTALL_BIN_DIR) + @cp $(BUILD_DIR)/$(BINARY_NAME) $(INSTALL_BIN_DIR)/$(BINARY_NAME) + @chmod +x $(INSTALL_BIN_DIR)/$(BINARY_NAME) + @echo "Installed binary to $(INSTALL_BIN_DIR)/$(BINARY_NAME)" + @echo "Installing builtin skills to $(WORKSPACE_SKILLS_DIR)..." + @mkdir -p $(WORKSPACE_SKILLS_DIR) + @for skill in $(BUILTIN_SKILLS_DIR)/*/; do \ + if [ -d "$$skill" ]; then \ + skill_name=$$(basename "$$skill"); \ + if [ -f "$$skill/SKILL.md" ]; then \ + cp -r "$$skill" $(WORKSPACE_SKILLS_DIR); \ + echo " ✓ Installed skill: $$skill_name"; \ + fi; \ + fi; \ + done + @echo "Installation complete!" + +## install-skills: Install builtin skills to workspace +install-skills: + @echo "Installing builtin skills to $(WORKSPACE_SKILLS_DIR)..." + @mkdir -p $(WORKSPACE_SKILLS_DIR) + @for skill in $(BUILTIN_SKILLS_DIR)/*/; do \ + if [ -d "$$skill" ]; then \ + skill_name=$$(basename "$$skill"); \ + if [ -f "$$skill/SKILL.md" ]; then \ + mkdir -p $(WORKSPACE_SKILLS_DIR)/$$skill_name; \ + cp -r "$$skill" $(WORKSPACE_SKILLS_DIR); \ + echo " ✓ Installed skill: $$skill_name"; \ + fi; \ + fi; \ + done + @echo "Skills installation complete!" + +## uninstall: Remove clawgo from system +uninstall: + @echo "Uninstalling $(BINARY_NAME)..." 
+ @rm -f $(INSTALL_BIN_DIR)/$(BINARY_NAME) + @echo "Removed binary from $(INSTALL_BIN_DIR)/$(BINARY_NAME)" + @echo "Note: Only the executable file has been deleted." + @echo "If you need to delete all configurations (config.json, workspace, etc.), run 'make uninstall-all'" + +## uninstall-all: Remove clawgo and all data +uninstall-all: + @echo "Removing workspace and skills..." + @rm -rf $(CLAWGO_HOME) + @echo "Removed workspace: $(CLAWGO_HOME)" + @echo "Complete uninstallation done!" + +## clean: Remove build artifacts +clean: + @echo "Cleaning build artifacts..." + @rm -rf $(BUILD_DIR) + @echo "Clean complete" + +## fmt: Format Go code +fmt: + @$(GO) fmt ./... + +## deps: Update dependencies +deps: + @$(GO) get -u ./... + @$(GO) mod tidy + +## run: Build and run clawgo +run: build + @$(BUILD_DIR)/$(BINARY_NAME) $(ARGS) + +## help: Show this help message +help: + @echo "clawgo Makefile" + @echo "" + @echo "Usage:" + @echo " make [target]" + @echo "" + @echo "Targets:" + @grep -E '^## ' $(MAKEFILE_LIST) | sed 's/## / /' + @echo "" + @echo "Examples:" + @echo " make build # Build for current platform" + @echo " make install # Install to $(INSTALL_PREFIX)/bin" + @echo " make build-all # Build for all platforms" + @echo " make uninstall # Remove from $(INSTALL_PREFIX)/bin" + @echo " make install-skills # Install skills to workspace" + @echo "" + @echo "Environment Variables:" + @echo " INSTALL_PREFIX # Installation prefix (default: ~/.local)" + @echo " WORKSPACE_DIR # Workspace directory (default: ~/.clawgo/workspace)" + @echo " VERSION # Version string (default: git describe)" + @echo "" + @echo "Current Configuration:" + @echo " Platform: $(PLATFORM)/$(ARCH)" + @echo " Binary: $(BINARY_PATH)" + @echo " Install Prefix: $(INSTALL_PREFIX)" + @echo " Workspace: $(WORKSPACE_DIR)" diff --git a/README.md b/README.md new file mode 100644 index 0000000..e7ab374 --- /dev/null +++ b/README.md @@ -0,0 +1,104 @@ +# ClawGo: 极致轻量的 Go 语言 AI 助手 + +[English](./README_EN.md) + +**ClawGo** 是一个用 
Go 编写的小巧而强大的 AI 助手。受 [nanobot](https://github.com/HKUDS/nanobot) 启发,它从底层进行了重构,可以在几乎任何设备上运行——从高端服务器到 $10 的 RISC-V 开发板。 + +## 🚀 为什么选择 ClawGo? + +- **🪶 极小占用**:内存占用 <10MB。在 Node.js 和 Python 无法运行的地方自如穿梭。 +- **⚡ 瞬时启动**:启动时间 <1 秒。无需沉重的运行时预热。 +- **💰 极低成本**:完美适配 LicheeRV Nano 或 Orange Pi Zero 等 $10 级别的单板机。 +- **🔌 即插即用**:单二进制文件,无复杂依赖。 +- **🧩 技能系统**:通过 `clawhub`、`coding-agent` 等技能扩展能力。 +- **🔐 便捷认证**:交互式 `login` 命令,支持 OpenAI、Anthropic、Gemini 等主流服务商。 + +## 🏁 快速开始 + +**1. 初始化** +```bash +clawgo onboard +``` + +**2. 配置服务商** +交互式设置您的 API Key (OpenAI, Anthropic, Gemini, Zhipu 等): +```bash +clawgo login +# 或者直接指定服务商: +# clawgo login openai +# clawgo login anthropic +# clawgo login gemini +``` + +**3. 开始聊天!** +```bash +clawgo agent -m "你好!你是谁?" +``` + +## 📦 技能系统 (Skills System) + +ClawGo 不仅仅是一个聊天机器人,它是一个可以使用工具的智能体。 + +**管理技能:** +```bash +# 列出已安装的技能 +clawgo skills list + +# 列出内置技能 +clawgo skills list-builtin + +# 安装特定技能 (例如 weather) +clawgo skills install-builtin +``` + +**特色技能:** +- **coding-agent**: 运行 Codex/Claude 执行自主编程任务。 +- **healthcheck**: 安全审计与主机加固。 +- **video-frames**: 使用 ffmpeg 从视频中提取帧。 +- **clawhub**: 管理社区提供的技能。 + +## 💬 连接频道 (Channels) + +运行 `clawgo gateway` 让 ClawGo 在你最喜欢的平台上 24/7 在线。 + +| 频道 | 状态 | 配置方式 | +|---------|--------|-------| +| **Telegram** | ✅ 就绪 | Bot Token | +| **Discord** | ✅ 就绪 | Bot Token + Intents | +| **QQ** | ✅ 就绪 | AppID + AppSecret | +| **DingTalk** | ✅ 就绪 | Client ID + Secret | + +*在 `~/.clawgo/config.json` 中配置频道。* + +## 🛠️ 安装 + +### 预编译二进制文件 +从 [发布页面](https://gitea.kkkk.dev/DBT/clawgo/releases) 下载适合您平台的固件 (Linux/macOS/Windows, x86/ARM/RISC-V)。 + +### 从源码编译 +```bash +git clone https://gitea.kkkk.dev/DBT/clawgo.git +cd clawgo +make deps +make build +make install +``` + +## 📊 对比 + +| 特性 | OpenClaw (Node) | NanoBot (Python) | **ClawGo (Go)** | +| :--- | :---: | :---: | :---: | +| **内存占用** | >1GB | >100MB | **< 10MB** | +| **启动时间** | 较慢 (>5s) | 中等 (>2s) | **瞬时 (<0.1s)** | +| **二进制大小** | 无 (源码) | 无 (源码) | **单文件 (~15MB)** | +| **架构支持** | x86/ARM | x86/ARM 
| **x86/ARM/RISC-V** | + +## 🤝 社区 + +加入讨论! +- **Discord**: [加入服务器](https://discord.gg/V4sAZ9XWpN) +- **Issues**: [GitHub Issues](https://gitea.kkkk.dev/DBT/clawgo/issues) + +## 📜 许可证 + +MIT 许可证。永远免费开源。 🦐 diff --git a/README_EN.md b/README_EN.md new file mode 100644 index 0000000..af0ce35 --- /dev/null +++ b/README_EN.md @@ -0,0 +1,100 @@ +**ClawGo** is a tiny but mighty AI assistant written in Go. Inspired by [nanobot](https://github.com/HKUDS/nanobot), it was refactored from the ground up to run on almost anything—from high-end servers to $10 RISC-V boards. + +## 🚀 Why ClawGo? + +- **🪶 Tiny Footprint**: <10MB RAM usage. Runs where Node.js and Python fear to tread. +- **⚡ Instant Boot**: Starts in <1 second. No heavy runtime warmup. +- **💰 Ultra-Low Cost**: Perfect for $10 SBCs like LicheeRV Nano or Orange Pi Zero. +- **🔌 Plug & Play**: Single binary. No complex dependencies. +- **🧩 Skill System**: Extend capabilities with `clawhub`, `coding-agent`, and more. +- **🔐 Easy Auth**: Interactive `login` command for OpenAI, Anthropic, Gemini, etc. + +## 🏁 Quick Start + +**1. Initialize** +```bash +clawgo onboard +``` + +**2. Configure Provider** +Interactively set up your API key (OpenAI, Anthropic, Gemini, Zhipu, etc.): +```bash +clawgo login +# Or specify provider directly: +# clawgo login openai +# clawgo login anthropic +# clawgo login gemini +``` + +**3. Chat!** +```bash +clawgo agent -m "Hello! Who are you?" +``` + +## 📦 Skills System + +ClawGo isn't just a chatbot—it's an agent that can use tools. + +**Manage Skills:** +```bash +# List installed skills +clawgo skills list + +# List builtin skills +clawgo skills list-builtin + +# Install a specific skill (e.g., weather) +clawgo skills install-builtin +``` + +**Featured Skills:** +- **coding-agent**: Run Codex/Claude for autonomous coding tasks. +- **healthcheck**: Security auditing and host hardening. +- **video-frames**: Extract frames from video using ffmpeg. +- **clawhub**: Manage skills from the community. 
+ +## 💬 Connect Channels + +Run `clawgo gateway` to turn ClawGo into a 24/7 bot on your favorite platform. + +| Channel | Status | Setup | +|---------|--------|-------| +| **Telegram** | ✅ Ready | Bot Token | +| **Discord** | ✅ Ready | Bot Token + Intents | +| **QQ** | ✅ Ready | AppID + AppSecret | +| **DingTalk** | ✅ Ready | Client ID + Secret | + +*Configure channels in `~/.clawgo/config.json`.* + +## 🛠️ Installation + +### Pre-built Binaries +Download the latest release for your platform (Linux/macOS/Windows, x86/ARM/RISC-V) from the [Releases Page](https://gitea.kkkk.dev/DBT/clawgo/releases). + +### Build from Source +```bash +git clone https://gitea.kkkk.dev/DBT/clawgo.git +cd clawgo +make deps +make build +make install +``` + +## 📊 Comparison + +| Feature | OpenClaw (Node) | NanoBot (Python) | **ClawGo (Go)** | +| :--- | :---: | :---: | :---: | +| **RAM Usage** | >1GB | >100MB | **< 10MB** | +| **Startup Time** | Slow (>5s) | Medium (>2s) | **Instant (<0.1s)** | +| **Binary Size** | N/A (Source) | N/A (Source) | **Single File (~15MB)** | +| **Architecture** | x86/ARM | x86/ARM | **x86/ARM/RISC-V** | + +## 🤝 Community + +Join the discussion! +- **Discord**: [Join Server](https://discord.gg/V4sAZ9XWpN) +- **Issues**: [GitHub Issues](https://gitea.kkkk.dev/DBT/clawgo/issues) + +## 📜 License + +MIT License. Free and open source forever. 
🦐 diff --git a/config.example.json b/config.example.json new file mode 100644 index 0000000..ce426c1 --- /dev/null +++ b/config.example.json @@ -0,0 +1,90 @@ +{ + "agents": { + "defaults": { + "workspace": "~/.clawgo/workspace", + "model": "glm-4.7", + "max_tokens": 8192, + "temperature": 0.7, + "max_tool_iterations": 20 + } + }, + "channels": { + "telegram": { + "enabled": false, + "token": "YOUR_TELEGRAM_BOT_TOKEN", + "allow_from": ["YOUR_USER_ID"] + }, + "discord": { + "enabled": false, + "token": "YOUR_DISCORD_BOT_TOKEN", + "allow_from": [] + }, + "maixcam": { + "enabled": false, + "host": "0.0.0.0", + "port": 18790, + "allow_from": [] + }, + "whatsapp": { + "enabled": false, + "bridge_url": "ws://localhost:3001", + "allow_from": [] + }, + "feishu": { + "enabled": false, + "app_id": "", + "app_secret": "", + "encrypt_key": "", + "verification_token": "", + "allow_from": [] + }, + "dingtalk": { + "enabled": false, + "client_id": "YOUR_CLIENT_ID", + "client_secret": "YOUR_CLIENT_SECRET", + "allow_from": [] + } + }, + "providers": { + "anthropic": { + "api_key": "", + "api_base": "" + }, + "openai": { + "api_key": "", + "api_base": "" + }, + "openrouter": { + "api_key": "sk-or-v1-xxx", + "api_base": "" + }, + "groq": { + "api_key": "gsk_xxx", + "api_base": "" + }, + "zhipu": { + "api_key": "YOUR_ZHIPU_API_KEY", + "api_base": "" + }, + "gemini": { + "api_key": "", + "api_base": "" + }, + "vllm": { + "api_key": "", + "api_base": "" + } + }, + "tools": { + "web": { + "search": { + "api_key": "YOUR_BRAVE_API_KEY", + "max_results": 5 + } + } + }, + "gateway": { + "host": "0.0.0.0", + "port": 18790 + } +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..c9c778f --- /dev/null +++ b/go.mod @@ -0,0 +1,41 @@ +module gitea.kkkk.dev/DBT/clawgo + +go 1.25.5 + +require ( + github.com/bwmarrin/discordgo v0.29.0 + github.com/caarlos0/env/v11 v11.3.1 + github.com/chzyer/readline v1.5.1 + github.com/gorilla/websocket v1.5.3 + github.com/larksuite/oapi-sdk-go/v3 
v3.5.3 + github.com/mymmrac/telego v1.6.0 + github.com/open-dingtalk/dingtalk-stream-sdk-go v0.9.1 + github.com/tencent-connect/botgo v0.2.1 + golang.org/x/oauth2 v0.35.0 +) + +require ( + github.com/andybalholm/brotli v1.2.0 // indirect + github.com/bytedance/gopkg v0.1.3 // indirect + github.com/bytedance/sonic v1.15.0 // indirect + github.com/bytedance/sonic/loader v0.5.0 // indirect + github.com/cloudwego/base64x v0.1.6 // indirect + github.com/go-resty/resty/v2 v2.17.1 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/grbit/go-json v0.11.0 // indirect + github.com/klauspost/compress v1.18.2 // indirect + github.com/klauspost/cpuid/v2 v2.2.9 // indirect + github.com/tidwall/gjson v1.18.0 // indirect + github.com/tidwall/match v1.2.0 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + github.com/valyala/fasthttp v1.69.0 // indirect + github.com/valyala/fastjson v1.6.7 // indirect + golang.org/x/arch v0.0.0-20210923205945-b76863e36670 // indirect + golang.org/x/crypto v0.48.0 // indirect + golang.org/x/net v0.50.0 // indirect + golang.org/x/sync v0.19.0 // indirect + golang.org/x/sys v0.41.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..bbec4f9 --- /dev/null +++ b/go.sum @@ -0,0 +1,238 @@ +cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= +github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ= +github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY= +github.com/bwmarrin/discordgo v0.29.0 h1:FmWeXFaKUwrcL3Cx65c20bTRW+vOb6k8AnaP+EgjDno= +github.com/bwmarrin/discordgo v0.29.0/go.mod h1:NJZpH+1AfhIcyQsPeuBKsUtYrRnjkyu0kIVMCHkZtRY= +github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= 
+github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= +github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE= +github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k= +github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE= +github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo= +github.com/caarlos0/env/v11 v11.3.1 h1:cArPWC15hWmEt+gWk7YBi7lEXTXCvpaSdCiZE2X5mCA= +github.com/caarlos0/env/v11 v11.3.1/go.mod h1:qupehSf/Y0TUTsxKywqRt/vJjN5nz6vauiYEUUr8P4U= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.2.1 h1:XHDu3E6q+gdHgsdTPH6ImJMIp436vR6MPtH8gP05QzM= +github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ= +github.com/chzyer/readline v1.5.1 h1:upd/6fQk4src78LMRzh5vItIt361/o4uq553V8B5sGI= +github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= +github.com/chzyer/test v1.0.0 h1:p3BQDXSxOhOG0P9z6/hGnII4LGiEPOYBhs8asl/fC04= +github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= +github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= +github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/fsnotify/fsnotify 
v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/go-redis/redis/v8 v8.11.4/go.mod h1:2Z2wHZXdQpCDXEGzqMockDpNyYvi2l4Pxt6RJr792+w= +github.com/go-resty/resty/v2 v2.6.0/go.mod h1:PwvJS6hvaPkjtjNg9ph+VrSD92bi5Zq73w/BIH7cC3Q= +github.com/go-resty/resty/v2 v2.17.1 h1:x3aMpHK1YM9e4va/TMDRlusDDoZiQ+ViDu/WpA6xTM4= +github.com/go-resty/resty/v2 v2.17.1/go.mod h1:kCKZ3wWmwJaNc7S29BRtUhJwy7iqmn+2mLtQrOyQlVA= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp 
v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg= +github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/grbit/go-json v0.11.0 h1:bAbyMdYrYl/OjYsSqLH99N2DyQ291mHy726Mx+sYrnc= +github.com/grbit/go-json v0.11.0/go.mod h1:IYpHsdybQ386+6g3VE6AXQ3uTGa5mquBme5/ZWmtzek= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk= +github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4= +github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY= +github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= 
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/larksuite/oapi-sdk-go/v3 v3.5.3 h1:xvf8Dv29kBXC5/DNDCLhHkAFW8l/0LlQJimO5Zn+JUk= +github.com/larksuite/oapi-sdk-go/v3 v3.5.3/go.mod h1:ZEplY+kwuIrj/nqw5uSCINNATcH3KdxSN7y+UxYY5fI= +github.com/mymmrac/telego v1.6.0 h1:Zc8rgyHozvd/7ZgyrigyHdAF9koHYMfilYfyB6wlFC0= +github.com/mymmrac/telego v1.6.0/go.mod h1:xt6ZWA8zi8KmuzryE1ImEdl9JSwjHNpM4yhC7D8hU4Y= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/open-dingtalk/dingtalk-stream-sdk-go v0.9.1 h1:Lb/Uzkiw2Ugt2Xf03J5wmv81PdkYOiWbI8CNBi1boC8= +github.com/open-dingtalk/dingtalk-stream-sdk-go v0.9.1/go.mod h1:ln3IqPYYocZbYvl9TAOrG/cxGR9xcn4pnZRLdCTEGEU= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/tencent-connect/botgo v0.2.1 h1:+BrTt9Zh+awL28GWC4g5Na3nQaGRWb0N5IctS8WqBCk= +github.com/tencent-connect/botgo v0.2.1/go.mod h1:oO1sG9ybhXNickvt+CVym5khwQ+uKhTR+IhTqEfOVsI= +github.com/tidwall/gjson v1.9.3/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY= +github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/match v1.2.0 h1:0pt8FlkOwjN2fPt4bIl4BoNxb98gGHN2ObFEDkrfZnM= +github.com/tidwall/match v1.2.0/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/twitchyliquid64/golang-asm v0.15.1 
h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.69.0 h1:fNLLESD2SooWeh2cidsuFtOcrEi4uB4m1mPrkJMZyVI= +github.com/valyala/fasthttp v1.69.0/go.mod h1:4wA4PfAraPlAsJ5jMSqCE2ug5tqUPwKXxVj8oNECGcw= +github.com/valyala/fastjson v1.6.7 h1:ZE4tRy0CIkh+qDc5McjatheGX2czdn8slQjomexVpBM= +github.com/valyala/fastjson v1.6.7/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY= +github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= +github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= +go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670 h1:18EFjUmQOcUvxNYSkA6jO9VAiXCnxFY6NyDX0bHDmkU= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto 
v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= +golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/net v0.19.0/go.mod 
h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= +golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60= +golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM= +golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/oauth2 v0.35.0 h1:Mv2mzuHuZuY2+bkyWXIHMfhNdJAdwW3FuWeCPYN5GVQ= +golang.org/x/oauth2 v0.35.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= +golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= +golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= 
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf 
v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/pkg/agent/context.go b/pkg/agent/context.go new file mode 100644 index 0000000..f0e3ea8 --- /dev/null +++ b/pkg/agent/context.go @@ -0,0 +1,246 @@ +package agent + +import ( + "fmt" + "os" + "path/filepath" + "runtime" + 
"strings" + "time" + + "gitea.kkkk.dev/DBT/clawgo/pkg/logger" + "gitea.kkkk.dev/DBT/clawgo/pkg/providers" + "gitea.kkkk.dev/DBT/clawgo/pkg/skills" +) + +type ContextBuilder struct { + workspace string + skillsLoader *skills.SkillsLoader + memory *MemoryStore + toolsSummary func() []string // Function to get tool summaries dynamically +} + +func getGlobalConfigDir() string { + home, err := os.UserHomeDir() + if err != nil { + return "" + } + return filepath.Join(home, ".clawgo") +} + +func NewContextBuilder(workspace string, toolsSummaryFunc func() []string) *ContextBuilder { + // builtin skills: 当前项目的 skills 目录 + // 使用当前工作目录下的 skills/ 目录 + wd, _ := os.Getwd() + builtinSkillsDir := filepath.Join(wd, "skills") + globalSkillsDir := filepath.Join(getGlobalConfigDir(), "skills") + + return &ContextBuilder{ + workspace: workspace, + skillsLoader: skills.NewSkillsLoader(workspace, globalSkillsDir, builtinSkillsDir), + memory: NewMemoryStore(workspace), + toolsSummary: toolsSummaryFunc, + } +} + +func (cb *ContextBuilder) getIdentity() string { + now := time.Now().Format("2006-01-02 15:04 (Monday)") + workspacePath, _ := filepath.Abs(filepath.Join(cb.workspace)) + runtime := fmt.Sprintf("%s %s, Go %s", runtime.GOOS, runtime.GOARCH, runtime.Version()) + + // Build tools section dynamically + toolsSection := cb.buildToolsSection() + + return fmt.Sprintf(`# clawgo 🦞 + +You are clawgo, a helpful AI assistant. + +## Current Time +%s + +## Runtime +%s + +## Workspace +Your workspace is at: %s +- Memory: %s/memory/MEMORY.md +- Daily Notes: %s/memory/YYYYMM/YYYYMMDD.md +- Skills: %s/skills/{skill-name}/SKILL.md + +%s + +Always be helpful, accurate, and concise. When using tools, explain what you're doing. 
+When remembering something, write to %s/memory/MEMORY.md`, + now, runtime, workspacePath, workspacePath, workspacePath, workspacePath, toolsSection, workspacePath) +} + +func (cb *ContextBuilder) buildToolsSection() string { + if cb.toolsSummary == nil { + return "" + } + + summaries := cb.toolsSummary() + if len(summaries) == 0 { + return "" + } + + var sb strings.Builder + sb.WriteString("## Available Tools\n\n") + sb.WriteString("You have access to the following tools:\n\n") + for _, s := range summaries { + sb.WriteString(s) + sb.WriteString("\n") + } + + return sb.String() +} + +func (cb *ContextBuilder) BuildSystemPrompt() string { + parts := []string{} + + // Core identity section + parts = append(parts, cb.getIdentity()) + + // Bootstrap files + bootstrapContent := cb.LoadBootstrapFiles() + if bootstrapContent != "" { + parts = append(parts, bootstrapContent) + } + + // Skills - show summary, AI can read full content with read_file tool + skillsSummary := cb.skillsLoader.BuildSkillsSummary() + if skillsSummary != "" { + parts = append(parts, fmt.Sprintf(`# Skills + +The following skills extend your capabilities. To use a skill, read its SKILL.md file using the read_file tool. 
+ +%s`, skillsSummary)) + } + + // Memory context + memoryContext := cb.memory.GetMemoryContext() + if memoryContext != "" { + parts = append(parts, "# Memory\n\n"+memoryContext) + } + + // Join with "---" separator + return strings.Join(parts, "\n\n---\n\n") +} + +func (cb *ContextBuilder) LoadBootstrapFiles() string { + bootstrapFiles := []string{ + "AGENTS.md", + "SOUL.md", + "USER.md", + "IDENTITY.md", + } + + var result string + for _, filename := range bootstrapFiles { + filePath := filepath.Join(cb.workspace, filename) + if data, err := os.ReadFile(filePath); err == nil { + result += fmt.Sprintf("## %s\n\n%s\n\n", filename, string(data)) + } + } + + return result +} + +func (cb *ContextBuilder) BuildMessages(history []providers.Message, summary string, currentMessage string, media []string, channel, chatID string) []providers.Message { + messages := []providers.Message{} + + systemPrompt := cb.BuildSystemPrompt() + + // Add Current Session info if provided + if channel != "" && chatID != "" { + systemPrompt += fmt.Sprintf("\n\n## Current Session\nChannel: %s\nChat ID: %s", channel, chatID) + } + + // Log system prompt summary for debugging (debug mode only) + logger.DebugCF("agent", "System prompt built", + map[string]interface{}{ + "total_chars": len(systemPrompt), + "total_lines": strings.Count(systemPrompt, "\n") + 1, + "section_count": strings.Count(systemPrompt, "\n\n---\n\n") + 1, + }) + + // Log preview of system prompt (avoid logging huge content) + preview := systemPrompt + if len(preview) > 500 { + preview = preview[:500] + "... (truncated)" + } + logger.DebugCF("agent", "System prompt preview", + map[string]interface{}{ + "preview": preview, + }) + + if summary != "" { + systemPrompt += "\n\n## Summary of Previous Conversation\n\n" + summary + } + + messages = append(messages, providers.Message{ + Role: "system", + Content: systemPrompt, + }) + + messages = append(messages, history...) 
+ + messages = append(messages, providers.Message{ + Role: "user", + Content: currentMessage, + }) + + return messages +} + +func (cb *ContextBuilder) AddToolResult(messages []providers.Message, toolCallID, toolName, result string) []providers.Message { + messages = append(messages, providers.Message{ + Role: "tool", + Content: result, + ToolCallID: toolCallID, + }) + return messages +} + +func (cb *ContextBuilder) AddAssistantMessage(messages []providers.Message, content string, toolCalls []map[string]interface{}) []providers.Message { + msg := providers.Message{ + Role: "assistant", + Content: content, + } + // Always add assistant message, whether or not it has tool calls + messages = append(messages, msg) + return messages +} + +func (cb *ContextBuilder) loadSkills() string { + allSkills := cb.skillsLoader.ListSkills() + if len(allSkills) == 0 { + return "" + } + + var skillNames []string + for _, s := range allSkills { + skillNames = append(skillNames, s.Name) + } + + content := cb.skillsLoader.LoadSkillsForContext(skillNames) + if content == "" { + return "" + } + + return "# Skill Definitions\n\n" + content +} + +// GetSkillsInfo returns information about loaded skills. 
+func (cb *ContextBuilder) GetSkillsInfo() map[string]interface{} { + allSkills := cb.skillsLoader.ListSkills() + skillNames := make([]string, 0, len(allSkills)) + for _, s := range allSkills { + skillNames = append(skillNames, s.Name) + } + return map[string]interface{}{ + "total": len(allSkills), + "available": len(allSkills), + "names": skillNames, + } +} diff --git a/pkg/agent/loop.go b/pkg/agent/loop.go new file mode 100644 index 0000000..0d6e217 --- /dev/null +++ b/pkg/agent/loop.go @@ -0,0 +1,599 @@ +// ClawGo - Ultra-lightweight personal AI agent +// Inspired by and based on nanobot: https://github.com/HKUDS/nanobot +// License: MIT +// +// Copyright (c) 2026 ClawGo contributors + +package agent + +import ( + "context" + "encoding/json" + "fmt" + "os" + "path/filepath" + "regexp" + "strings" + + "gitea.kkkk.dev/DBT/clawgo/pkg/bus" + "gitea.kkkk.dev/DBT/clawgo/pkg/config" + "gitea.kkkk.dev/DBT/clawgo/pkg/cron" + "gitea.kkkk.dev/DBT/clawgo/pkg/logger" + "gitea.kkkk.dev/DBT/clawgo/pkg/providers" + "gitea.kkkk.dev/DBT/clawgo/pkg/session" + "gitea.kkkk.dev/DBT/clawgo/pkg/tools" +) + +type AgentLoop struct { + bus *bus.MessageBus + provider providers.LLMProvider + workspace string + model string + maxIterations int + sessions *session.SessionManager + contextBuilder *ContextBuilder + tools *tools.ToolRegistry + running bool +} + +func NewAgentLoop(cfg *config.Config, msgBus *bus.MessageBus, provider providers.LLMProvider, cs *cron.CronService) *AgentLoop { + workspace := cfg.WorkspacePath() + os.MkdirAll(workspace, 0755) + + toolsRegistry := tools.NewToolRegistry() + toolsRegistry.Register(&tools.ReadFileTool{}) + toolsRegistry.Register(&tools.WriteFileTool{}) + toolsRegistry.Register(&tools.ListDirTool{}) + toolsRegistry.Register(tools.NewExecTool(workspace)) + + if cs != nil { + toolsRegistry.Register(tools.NewRemindTool(cs)) + } + + braveAPIKey := cfg.Tools.Web.Search.APIKey + toolsRegistry.Register(tools.NewWebSearchTool(braveAPIKey, 
cfg.Tools.Web.Search.MaxResults)) + toolsRegistry.Register(tools.NewWebFetchTool(50000)) + + // Register message tool + messageTool := tools.NewMessageTool() + messageTool.SetSendCallback(func(channel, chatID, content string) error { + msgBus.PublishOutbound(bus.OutboundMessage{ + Channel: channel, + ChatID: chatID, + Content: content, + }) + return nil + }) + toolsRegistry.Register(messageTool) + + // Register spawn tool + subagentManager := tools.NewSubagentManager(provider, workspace, msgBus) + spawnTool := tools.NewSpawnTool(subagentManager) + toolsRegistry.Register(spawnTool) + + // Register edit file tool + editFileTool := tools.NewEditFileTool(workspace) + toolsRegistry.Register(editFileTool) + + // Register memory search tool + memorySearchTool := tools.NewMemorySearchTool(workspace) + toolsRegistry.Register(memorySearchTool) + + // Register camera tool + toolsRegistry.Register(tools.NewCameraTool(workspace)) + // Register system info tool + toolsRegistry.Register(tools.NewSystemInfoTool()) + + sessionsManager := session.NewSessionManager(filepath.Join(filepath.Dir(cfg.WorkspacePath()), "sessions")) + + return &AgentLoop{ + bus: msgBus, + provider: provider, + workspace: workspace, + model: cfg.Agents.Defaults.Model, + maxIterations: cfg.Agents.Defaults.MaxToolIterations, + sessions: sessionsManager, + contextBuilder: NewContextBuilder(workspace, func() []string { return toolsRegistry.GetSummaries() }), + tools: toolsRegistry, + running: false, + } +} + +func (al *AgentLoop) Run(ctx context.Context) error { + al.running = true + + for al.running { + select { + case <-ctx.Done(): + return nil + default: + msg, ok := al.bus.ConsumeInbound(ctx) + if !ok { + continue + } + + response, err := al.processMessage(ctx, msg) + if err != nil { + response = fmt.Sprintf("Error processing message: %v", err) + } + + if response != "" { + al.bus.PublishOutbound(bus.OutboundMessage{ + Channel: msg.Channel, + ChatID: msg.ChatID, + Content: response, + }) + } + } + } + + 
return nil +} + +func (al *AgentLoop) Stop() { + al.running = false +} + +func (al *AgentLoop) ProcessDirect(ctx context.Context, content, sessionKey string) (string, error) { + msg := bus.InboundMessage{ + Channel: "cli", + SenderID: "user", + ChatID: "direct", + Content: content, + SessionKey: sessionKey, + } + + return al.processMessage(ctx, msg) +} + +func (al *AgentLoop) processMessage(ctx context.Context, msg bus.InboundMessage) (string, error) { + // Add message preview to log + preview := truncate(msg.Content, 80) + logger.InfoCF("agent", fmt.Sprintf("Processing message from %s:%s: %s", msg.Channel, msg.SenderID, preview), + map[string]interface{}{ + "channel": msg.Channel, + "chat_id": msg.ChatID, + "sender_id": msg.SenderID, + "session_key": msg.SessionKey, + }) + + // Route system messages to processSystemMessage + if msg.Channel == "system" { + return al.processSystemMessage(ctx, msg) + } + + // Update tool contexts + if tool, ok := al.tools.Get("message"); ok { + if mt, ok := tool.(*tools.MessageTool); ok { + mt.SetContext(msg.Channel, msg.ChatID) + } + } + if tool, ok := al.tools.Get("spawn"); ok { + if st, ok := tool.(*tools.SpawnTool); ok { + st.SetContext(msg.Channel, msg.ChatID) + } + } + + history := al.sessions.GetHistory(msg.SessionKey) + summary := al.sessions.GetSummary(msg.SessionKey) + + messages := al.contextBuilder.BuildMessages( + history, + summary, + msg.Content, + nil, + msg.Channel, + msg.ChatID, + ) + + iteration := 0 + var finalContent string + + for iteration < al.maxIterations { + iteration++ + + logger.DebugCF("agent", "LLM iteration", + map[string]interface{}{ + "iteration": iteration, + "max": al.maxIterations, + }) + + toolDefs := al.tools.GetDefinitions() + providerToolDefs := make([]providers.ToolDefinition, 0, len(toolDefs)) + for _, td := range toolDefs { + providerToolDefs = append(providerToolDefs, providers.ToolDefinition{ + Type: td["type"].(string), + Function: providers.ToolFunctionDefinition{ + Name: 
td["function"].(map[string]interface{})["name"].(string), + Description: td["function"].(map[string]interface{})["description"].(string), + Parameters: td["function"].(map[string]interface{})["parameters"].(map[string]interface{}), + }, + }) + } + + // Log LLM request details + logger.DebugCF("agent", "LLM request", + map[string]interface{}{ + "iteration": iteration, + "model": al.model, + "messages_count": len(messages), + "tools_count": len(providerToolDefs), + "max_tokens": 8192, + "temperature": 0.7, + "system_prompt_len": len(messages[0].Content), + }) + + // Log full messages (detailed) + logger.DebugCF("agent", "Full LLM request", + map[string]interface{}{ + "iteration": iteration, + "messages_json": formatMessagesForLog(messages), + "tools_json": formatToolsForLog(providerToolDefs), + }) + + response, err := al.provider.Chat(ctx, messages, providerToolDefs, al.model, map[string]interface{}{ + "max_tokens": 8192, + "temperature": 0.7, + }) + + if err != nil { + logger.ErrorCF("agent", "LLM call failed", + map[string]interface{}{ + "iteration": iteration, + "error": err.Error(), + }) + return "", fmt.Errorf("LLM call failed: %w", err) + } + + if len(response.ToolCalls) == 0 { + finalContent = response.Content + logger.InfoCF("agent", "LLM response without tool calls (direct answer)", + map[string]interface{}{ + "iteration": iteration, + "content_chars": len(finalContent), + }) + break + } + + toolNames := make([]string, 0, len(response.ToolCalls)) + for _, tc := range response.ToolCalls { + toolNames = append(toolNames, tc.Name) + } + logger.InfoCF("agent", "LLM requested tool calls", + map[string]interface{}{ + "tools": toolNames, + "count": len(toolNames), + "iteration": iteration, + }) + + assistantMsg := providers.Message{ + Role: "assistant", + Content: response.Content, + } + + for _, tc := range response.ToolCalls { + argumentsJSON, _ := json.Marshal(tc.Arguments) + assistantMsg.ToolCalls = append(assistantMsg.ToolCalls, providers.ToolCall{ + ID: 
tc.ID, + Type: "function", + Function: &providers.FunctionCall{ + Name: tc.Name, + Arguments: string(argumentsJSON), + }, + }) + } + messages = append(messages, assistantMsg) + + for _, tc := range response.ToolCalls { + // Log tool call with arguments preview + argsJSON, _ := json.Marshal(tc.Arguments) + argsPreview := truncate(string(argsJSON), 200) + logger.InfoCF("agent", fmt.Sprintf("Tool call: %s(%s)", tc.Name, argsPreview), + map[string]interface{}{ + "tool": tc.Name, + "iteration": iteration, + }) + + result, err := al.tools.Execute(ctx, tc.Name, tc.Arguments) + if err != nil { + result = fmt.Sprintf("Error: %v", err) + } + + toolResultMsg := providers.Message{ + Role: "tool", + Content: result, + ToolCallID: tc.ID, + } + messages = append(messages, toolResultMsg) + } + } + + if finalContent == "" { + finalContent = "I've completed processing but have no response to give." + } + + // Filter out ... content from user-facing response + // Keep full content in debug logs if needed, but remove from final output + re := regexp.MustCompile(`(?s).*?`) + userContent := re.ReplaceAllString(finalContent, "") + userContent = strings.TrimSpace(userContent) + if userContent == "" && finalContent != "" { + // If only thoughts were present, maybe provide a generic "Done" or keep something? + // For now, let's assume thoughts are auxiliary and empty response is okay if tools did work. + // If no tools ran and only thoughts, user might be confused. + if iteration == 1 { + userContent = "Thinking process completed." + } + } + + al.sessions.AddMessage(msg.SessionKey, "user", msg.Content) + // We store the filtered content in history so the model sees what the user saw + // (or we could store full content if we want the model to remember its thoughts) + // The prompt says "filter out ... from the user-facing response". + // I'll store the filtered version to be safe. 
+ al.sessions.AddMessage(msg.SessionKey, "assistant", userContent) + al.sessions.Save(al.sessions.GetOrCreate(msg.SessionKey)) + + // Log response preview (original content) + responsePreview := truncate(finalContent, 120) + logger.InfoCF("agent", fmt.Sprintf("Response to %s:%s: %s", msg.Channel, msg.SenderID, responsePreview), + map[string]interface{}{ + "iterations": iteration, + "final_length": len(finalContent), + "user_length": len(userContent), + }) + + return userContent, nil +} + +func (al *AgentLoop) processSystemMessage(ctx context.Context, msg bus.InboundMessage) (string, error) { + // Verify this is a system message + if msg.Channel != "system" { + return "", fmt.Errorf("processSystemMessage called with non-system message channel: %s", msg.Channel) + } + + logger.InfoCF("agent", "Processing system message", + map[string]interface{}{ + "sender_id": msg.SenderID, + "chat_id": msg.ChatID, + }) + + // Parse origin from chat_id (format: "channel:chat_id") + var originChannel, originChatID string + if idx := strings.Index(msg.ChatID, ":"); idx > 0 { + originChannel = msg.ChatID[:idx] + originChatID = msg.ChatID[idx+1:] + } else { + // Fallback + originChannel = "cli" + originChatID = msg.ChatID + } + + // Use the origin session for context + sessionKey := fmt.Sprintf("%s:%s", originChannel, originChatID) + + // Update tool contexts to original channel/chatID + if tool, ok := al.tools.Get("message"); ok { + if mt, ok := tool.(*tools.MessageTool); ok { + mt.SetContext(originChannel, originChatID) + } + } + if tool, ok := al.tools.Get("spawn"); ok { + if st, ok := tool.(*tools.SpawnTool); ok { + st.SetContext(originChannel, originChatID) + } + } + + // Build messages with the announce content + history := al.sessions.GetHistory(sessionKey) + summary := al.sessions.GetSummary(sessionKey) + messages := al.contextBuilder.BuildMessages( + history, + summary, + msg.Content, + nil, + originChannel, + originChatID, + ) + + iteration := 0 + var finalContent string + + 
for iteration < al.maxIterations { + iteration++ + + toolDefs := al.tools.GetDefinitions() + providerToolDefs := make([]providers.ToolDefinition, 0, len(toolDefs)) + for _, td := range toolDefs { + providerToolDefs = append(providerToolDefs, providers.ToolDefinition{ + Type: td["type"].(string), + Function: providers.ToolFunctionDefinition{ + Name: td["function"].(map[string]interface{})["name"].(string), + Description: td["function"].(map[string]interface{})["description"].(string), + Parameters: td["function"].(map[string]interface{})["parameters"].(map[string]interface{}), + }, + }) + } + + // Log LLM request details + logger.DebugCF("agent", "LLM request", + map[string]interface{}{ + "iteration": iteration, + "model": al.model, + "messages_count": len(messages), + "tools_count": len(providerToolDefs), + "max_tokens": 8192, + "temperature": 0.7, + "system_prompt_len": len(messages[0].Content), + }) + + // Log full messages (detailed) + logger.DebugCF("agent", "Full LLM request", + map[string]interface{}{ + "iteration": iteration, + "messages_json": formatMessagesForLog(messages), + "tools_json": formatToolsForLog(providerToolDefs), + }) + + response, err := al.provider.Chat(ctx, messages, providerToolDefs, al.model, map[string]interface{}{ + "max_tokens": 8192, + "temperature": 0.7, + }) + + if err != nil { + logger.ErrorCF("agent", "LLM call failed in system message", + map[string]interface{}{ + "iteration": iteration, + "error": err.Error(), + }) + return "", fmt.Errorf("LLM call failed: %w", err) + } + + if len(response.ToolCalls) == 0 { + finalContent = response.Content + break + } + + assistantMsg := providers.Message{ + Role: "assistant", + Content: response.Content, + } + + for _, tc := range response.ToolCalls { + argumentsJSON, _ := json.Marshal(tc.Arguments) + assistantMsg.ToolCalls = append(assistantMsg.ToolCalls, providers.ToolCall{ + ID: tc.ID, + Type: "function", + Function: &providers.FunctionCall{ + Name: tc.Name, + Arguments: 
string(argumentsJSON), + }, + }) + } + messages = append(messages, assistantMsg) + + for _, tc := range response.ToolCalls { + result, err := al.tools.Execute(ctx, tc.Name, tc.Arguments) + if err != nil { + result = fmt.Sprintf("Error: %v", err) + } + + toolResultMsg := providers.Message{ + Role: "tool", + Content: result, + ToolCallID: tc.ID, + } + messages = append(messages, toolResultMsg) + } + } + + if finalContent == "" { + finalContent = "Background task completed." + } + + // Save to session with system message marker + al.sessions.AddMessage(sessionKey, "user", fmt.Sprintf("[System: %s] %s", msg.SenderID, msg.Content)) + al.sessions.AddMessage(sessionKey, "assistant", finalContent) + al.sessions.Save(al.sessions.GetOrCreate(sessionKey)) + + logger.InfoCF("agent", "System message processing completed", + map[string]interface{}{ + "iterations": iteration, + "final_length": len(finalContent), + }) + + return finalContent, nil +} + +// truncate returns a truncated version of s with at most maxLen characters. +// If the string is truncated, "..." is appended to indicate truncation. +// If the string fits within maxLen, it is returned unchanged. +func truncate(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + // Reserve 3 chars for "..." + if maxLen <= 3 { + return s[:maxLen] + } + return s[:maxLen-3] + "..." +} + +// GetStartupInfo returns information about loaded tools and skills for logging. 
+func (al *AgentLoop) GetStartupInfo() map[string]interface{} { + info := make(map[string]interface{}) + + // Tools info + tools := al.tools.List() + info["tools"] = map[string]interface{}{ + "count": len(tools), + "names": tools, + } + + // Skills info + info["skills"] = al.contextBuilder.GetSkillsInfo() + + return info +} + +// formatMessagesForLog formats messages for logging +func formatMessagesForLog(messages []providers.Message) string { + if len(messages) == 0 { + return "[]" + } + + var result string + result += "[\n" + for i, msg := range messages { + result += fmt.Sprintf(" [%d] Role: %s\n", i, msg.Role) + if msg.ToolCalls != nil && len(msg.ToolCalls) > 0 { + result += " ToolCalls:\n" + for _, tc := range msg.ToolCalls { + result += fmt.Sprintf(" - ID: %s, Type: %s, Name: %s\n", tc.ID, tc.Type, tc.Name) + if tc.Function != nil { + result += fmt.Sprintf(" Arguments: %s\n", truncateString(tc.Function.Arguments, 200)) + } + } + } + if msg.Content != "" { + content := truncateString(msg.Content, 200) + result += fmt.Sprintf(" Content: %s\n", content) + } + if msg.ToolCallID != "" { + result += fmt.Sprintf(" ToolCallID: %s\n", msg.ToolCallID) + } + result += "\n" + } + result += "]" + return result +} + +// formatToolsForLog formats tool definitions for logging +func formatToolsForLog(tools []providers.ToolDefinition) string { + if len(tools) == 0 { + return "[]" + } + + var result string + result += "[\n" + for i, tool := range tools { + result += fmt.Sprintf(" [%d] Type: %s, Name: %s\n", i, tool.Type, tool.Function.Name) + result += fmt.Sprintf(" Description: %s\n", tool.Function.Description) + if len(tool.Function.Parameters) > 0 { + result += fmt.Sprintf(" Parameters: %s\n", truncateString(fmt.Sprintf("%v", tool.Function.Parameters), 200)) + } + } + result += "]" + return result +} + +// truncateString truncates a string to max length +func truncateString(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + if maxLen <= 3 { + return 
s[:maxLen] + } + return s[:maxLen-3] + "..." +} diff --git a/pkg/agent/memory.go b/pkg/agent/memory.go new file mode 100644 index 0000000..042ed18 --- /dev/null +++ b/pkg/agent/memory.go @@ -0,0 +1,161 @@ +// ClawGo - Ultra-lightweight personal AI agent +// Inspired by and based on nanobot: https://github.com/HKUDS/nanobot +// License: MIT +// +// Copyright (c) 2026 ClawGo contributors + +package agent + +import ( + "fmt" + "os" + "path/filepath" + "time" +) + +// MemoryStore manages persistent memory for the agent. +// - Long-term memory: memory/MEMORY.md +// - Daily notes: memory/YYYYMM/YYYYMMDD.md +type MemoryStore struct { + workspace string + memoryDir string + memoryFile string +} + +// NewMemoryStore creates a new MemoryStore with the given workspace path. +// It ensures the memory directory exists. +func NewMemoryStore(workspace string) *MemoryStore { + memoryDir := filepath.Join(workspace, "memory") + memoryFile := filepath.Join(memoryDir, "MEMORY.md") + + // Ensure memory directory exists + os.MkdirAll(memoryDir, 0755) + + return &MemoryStore{ + workspace: workspace, + memoryDir: memoryDir, + memoryFile: memoryFile, + } +} + +// getTodayFile returns the path to today's daily note file (memory/YYYYMM/YYYYMMDD.md). +func (ms *MemoryStore) getTodayFile() string { + today := time.Now().Format("20060102") // YYYYMMDD + monthDir := today[:6] // YYYYMM + filePath := filepath.Join(ms.memoryDir, monthDir, today+".md") + return filePath +} + +// ReadLongTerm reads the long-term memory (MEMORY.md). +// Returns empty string if the file doesn't exist. +func (ms *MemoryStore) ReadLongTerm() string { + if data, err := os.ReadFile(ms.memoryFile); err == nil { + return string(data) + } + return "" +} + +// WriteLongTerm writes content to the long-term memory file (MEMORY.md). +func (ms *MemoryStore) WriteLongTerm(content string) error { + return os.WriteFile(ms.memoryFile, []byte(content), 0644) +} + +// ReadToday reads today's daily note. 
+// Returns empty string if the file doesn't exist. +func (ms *MemoryStore) ReadToday() string { + todayFile := ms.getTodayFile() + if data, err := os.ReadFile(todayFile); err == nil { + return string(data) + } + return "" +} + +// AppendToday appends content to today's daily note. +// If the file doesn't exist, it creates a new file with a date header. +func (ms *MemoryStore) AppendToday(content string) error { + todayFile := ms.getTodayFile() + + // Ensure month directory exists + monthDir := filepath.Dir(todayFile) + os.MkdirAll(monthDir, 0755) + + var existingContent string + if data, err := os.ReadFile(todayFile); err == nil { + existingContent = string(data) + } + + var newContent string + if existingContent == "" { + // Add header for new day + header := fmt.Sprintf("# %s\n\n", time.Now().Format("2006-01-02")) + newContent = header + content + } else { + // Append to existing content + newContent = existingContent + "\n" + content + } + + return os.WriteFile(todayFile, []byte(newContent), 0644) +} + +// GetRecentDailyNotes returns daily notes from the last N days. +// Contents are joined with "---" separator. +func (ms *MemoryStore) GetRecentDailyNotes(days int) string { + var notes []string + + for i := 0; i < days; i++ { + date := time.Now().AddDate(0, 0, -i) + dateStr := date.Format("20060102") // YYYYMMDD + monthDir := dateStr[:6] // YYYYMM + filePath := filepath.Join(ms.memoryDir, monthDir, dateStr+".md") + + if data, err := os.ReadFile(filePath); err == nil { + notes = append(notes, string(data)) + } + } + + if len(notes) == 0 { + return "" + } + + // Join with separator + var result string + for i, note := range notes { + if i > 0 { + result += "\n\n---\n\n" + } + result += note + } + return result +} + +// GetMemoryContext returns formatted memory context for the agent prompt. +// Includes long-term memory and recent daily notes. 
+func (ms *MemoryStore) GetMemoryContext() string { + var parts []string + + // Long-term memory + longTerm := ms.ReadLongTerm() + if longTerm != "" { + parts = append(parts, "## Long-term Memory\n\n"+longTerm) + } + + // Recent daily notes (last 3 days) + recentNotes := ms.GetRecentDailyNotes(3) + if recentNotes != "" { + parts = append(parts, "## Recent Daily Notes\n\n"+recentNotes) + } + + if len(parts) == 0 { + return "" + } + + // Join parts with separator + var result string + for i, part := range parts { + if i > 0 { + result += "\n\n---\n\n" + } + result += part + } + return fmt.Sprintf("# Memory\n\n%s", result) +} diff --git a/pkg/bus/bus.go b/pkg/bus/bus.go new file mode 100644 index 0000000..6283251 --- /dev/null +++ b/pkg/bus/bus.go @@ -0,0 +1,65 @@ +package bus + +import ( + "context" + "sync" +) + +type MessageBus struct { + inbound chan InboundMessage + outbound chan OutboundMessage + handlers map[string]MessageHandler + mu sync.RWMutex +} + +func NewMessageBus() *MessageBus { + return &MessageBus{ + inbound: make(chan InboundMessage, 100), + outbound: make(chan OutboundMessage, 100), + handlers: make(map[string]MessageHandler), + } +} + +func (mb *MessageBus) PublishInbound(msg InboundMessage) { + mb.inbound <- msg +} + +func (mb *MessageBus) ConsumeInbound(ctx context.Context) (InboundMessage, bool) { + select { + case msg := <-mb.inbound: + return msg, true + case <-ctx.Done(): + return InboundMessage{}, false + } +} + +func (mb *MessageBus) PublishOutbound(msg OutboundMessage) { + mb.outbound <- msg +} + +func (mb *MessageBus) SubscribeOutbound(ctx context.Context) (OutboundMessage, bool) { + select { + case msg := <-mb.outbound: + return msg, true + case <-ctx.Done(): + return OutboundMessage{}, false + } +} + +func (mb *MessageBus) RegisterHandler(channel string, handler MessageHandler) { + mb.mu.Lock() + defer mb.mu.Unlock() + mb.handlers[channel] = handler +} + +func (mb *MessageBus) GetHandler(channel string) (MessageHandler, bool) { + 
	mb.mu.RLock()
	defer mb.mu.RUnlock()
	handler, ok := mb.handlers[channel]
	return handler, ok
}

// Close shuts down both internal queues.
// NOTE(review): closing the channels will panic any goroutine still inside
// PublishInbound/PublishOutbound — confirm all producers are stopped before
// Close is invoked.
func (mb *MessageBus) Close() {
	close(mb.inbound)
	close(mb.outbound)
}
diff --git a/pkg/bus/types.go b/pkg/bus/types.go new file mode 100644 index 0000000..44f9181 --- /dev/null +++ b/pkg/bus/types.go @@ -0,0 +1,19 @@
package bus

// InboundMessage is a message received from a chat channel, routed to the agent.
type InboundMessage struct {
	Channel    string            `json:"channel"`            // source channel name (e.g. "telegram", "discord")
	SenderID   string            `json:"sender_id"`          // platform-specific sender identifier
	ChatID     string            `json:"chat_id"`            // platform-specific conversation identifier
	Content    string            `json:"content"`            // message text
	Media      []string          `json:"media,omitempty"`    // local paths or URLs of attachments
	SessionKey string            `json:"session_key"`        // "<channel>:<chatID>", built by BaseChannel.HandleMessage
	Metadata   map[string]string `json:"metadata,omitempty"` // channel-specific extras
}

// OutboundMessage is a reply to be delivered back to a chat channel.
type OutboundMessage struct {
	Channel string `json:"channel"` // destination channel name
	ChatID  string `json:"chat_id"` // destination conversation
	Content string `json:"content"` // reply text
}

// MessageHandler processes a single inbound message.
type MessageHandler func(InboundMessage) error
diff --git a/pkg/channels/base.go b/pkg/channels/base.go new file mode 100644 index 0000000..06bd71c --- /dev/null +++ b/pkg/channels/base.go @@ -0,0 +1,82 @@
package channels

import (
	"context"
	"fmt"

	"gitea.kkkk.dev/DBT/clawgo/pkg/bus"
)

// Channel is the common interface implemented by every chat integration
// (Telegram, Discord, Feishu, DingTalk, MaixCam, ...).
type Channel interface {
	Name() string
	Start(ctx context.Context) error
	Stop(ctx context.Context) error
	Send(ctx context.Context, msg bus.OutboundMessage) error
	IsRunning() bool
	IsAllowed(senderID string) bool
}

// BaseChannel holds the state shared by all concrete channel implementations.
type BaseChannel struct {
	config  interface{}     // channel-specific config struct (opaque here)
	bus     *bus.MessageBus // bus used to publish inbound messages
	running bool            // NOTE(review): read/written without a lock; confirm Start/Stop and readers never race
	name    string          // channel name, also used as the SessionKey prefix
	allowList []string      // sender IDs allowed to talk to the bot; empty = allow everyone
}

// NewBaseChannel constructs the shared channel state; the channel starts stopped.
func NewBaseChannel(name string, config interface{}, bus *bus.MessageBus, allowList []string) *BaseChannel {
	return &BaseChannel{
		config:    config,
		bus:       bus,
		name:      name,
		allowList: allowList,
		running:   false,
	}
}

// Name returns the channel's identifier (e.g. "discord").
func (c *BaseChannel) Name() string {
	return c.name
}

// IsRunning reports whether the channel has been started and not yet stopped.
func (c *BaseChannel) IsRunning() bool {
	return c.running
}

// IsAllowed reports whether senderID may interact with the bot.
// An empty allow list means every sender is allowed.
func (c *BaseChannel) IsAllowed(senderID string) bool {
	if len(c.allowList) == 0 {
		return true
	}

	for _, allowed := range c.allowList {
		if senderID == allowed {
			return true
		}
	}

	return false
}

// HandleMessage filters an incoming platform message through the allow list,
// wraps it in a bus.InboundMessage, and publishes it on the message bus.
// Messages from senders not on the allow list are silently dropped.
func (c *BaseChannel) HandleMessage(senderID, chatID, content string, media []string, metadata map[string]string) {
	if !c.IsAllowed(senderID) {
		return
	}

	// Build the session key as "<channel>:<chatID>".
	sessionKey := fmt.Sprintf("%s:%s", c.name, chatID)

	msg := bus.InboundMessage{
		Channel:    c.name,
		SenderID:   senderID,
		ChatID:     chatID,
		Content:    content,
		Media:      media,
		Metadata:   metadata,
		SessionKey: sessionKey,
	}

	c.bus.PublishInbound(msg)
}

// setRunning updates the running flag.
// NOTE(review): unsynchronized; confirm it is only ever called from
// Start/Stop and never concurrently with IsRunning readers.
func (c *BaseChannel) setRunning(running bool) {
	c.running = running
}
diff --git a/pkg/channels/dingtalk.go b/pkg/channels/dingtalk.go new file mode 100644 index 0000000..cf99cf7 --- /dev/null +++ b/pkg/channels/dingtalk.go @@ -0,0 +1,193 @@
// ClawGo - Ultra-lightweight personal AI agent
// DingTalk channel implementation using Stream Mode

package channels

import (
	"context"
	"fmt"
	"log"
	"sync"

	"github.com/open-dingtalk/dingtalk-stream-sdk-go/chatbot"
	"github.com/open-dingtalk/dingtalk-stream-sdk-go/client"
	"gitea.kkkk.dev/DBT/clawgo/pkg/bus"
	"gitea.kkkk.dev/DBT/clawgo/pkg/config"
)

// DingTalkChannel implements the Channel interface for DingTalk.
// It uses a WebSocket (Stream Mode) for receiving messages and the
// per-conversation session webhook for sending replies.
type DingTalkChannel struct {
	*BaseChannel
	config       config.DingTalkConfig
	clientID     string
	clientSecret string
	streamClient *client.StreamClient
	ctx          context.Context
	cancel       context.CancelFunc
	// sessionWebhooks stores the reply webhook per chat: chatID -> sessionWebhook (string).
	// Populated on message receipt; Send fails for chats we have never heard from.
	sessionWebhooks sync.Map
}

// NewDingTalkChannel creates a new DingTalk channel instance.
// It fails fast when client_id or client_secret is missing.
func NewDingTalkChannel(cfg config.DingTalkConfig, messageBus *bus.MessageBus) (*DingTalkChannel, error) {
	if cfg.ClientID == "" || cfg.ClientSecret == "" {
		return nil, fmt.Errorf("dingtalk client_id and client_secret are required")
	}

	base := NewBaseChannel("dingtalk", cfg, messageBus, cfg.AllowFrom)

	return &DingTalkChannel{
		BaseChannel: base,
config: cfg, + clientID: cfg.ClientID, + clientSecret: cfg.ClientSecret, + }, nil +} + +// Start initializes the DingTalk channel with Stream Mode +func (c *DingTalkChannel) Start(ctx context.Context) error { + log.Printf("Starting DingTalk channel (Stream Mode)...") + + c.ctx, c.cancel = context.WithCancel(ctx) + + // Create credential config + cred := client.NewAppCredentialConfig(c.clientID, c.clientSecret) + + // Create the stream client with options + c.streamClient = client.NewStreamClient( + client.WithAppCredential(cred), + client.WithAutoReconnect(true), + ) + + // Register chatbot callback handler (IChatBotMessageHandler is a function type) + c.streamClient.RegisterChatBotCallbackRouter(c.onChatBotMessageReceived) + + // Start the stream client + if err := c.streamClient.Start(c.ctx); err != nil { + return fmt.Errorf("failed to start stream client: %w", err) + } + + c.setRunning(true) + log.Println("DingTalk channel started (Stream Mode)") + return nil +} + +// Stop gracefully stops the DingTalk channel +func (c *DingTalkChannel) Stop(ctx context.Context) error { + log.Println("Stopping DingTalk channel...") + + if c.cancel != nil { + c.cancel() + } + + if c.streamClient != nil { + c.streamClient.Close() + } + + c.setRunning(false) + log.Println("DingTalk channel stopped") + return nil +} + +// Send sends a message to DingTalk via the chatbot reply API +func (c *DingTalkChannel) Send(ctx context.Context, msg bus.OutboundMessage) error { + if !c.IsRunning() { + return fmt.Errorf("dingtalk channel not running") + } + + // Get session webhook from storage + sessionWebhookRaw, ok := c.sessionWebhooks.Load(msg.ChatID) + if !ok { + return fmt.Errorf("no session_webhook found for chat %s, cannot send message", msg.ChatID) + } + + sessionWebhook, ok := sessionWebhookRaw.(string) + if !ok { + return fmt.Errorf("invalid session_webhook type for chat %s", msg.ChatID) + } + + log.Printf("DingTalk message to %s: %s", msg.ChatID, truncateStringDingTalk(msg.Content, 
100)) + + // Use the session webhook to send the reply + return c.SendDirectReply(sessionWebhook, msg.Content) +} + +// onChatBotMessageReceived implements the IChatBotMessageHandler function signature +// This is called by the Stream SDK when a new message arrives +// IChatBotMessageHandler is: func(c context.Context, data *chatbot.BotCallbackDataModel) ([]byte, error) +func (c *DingTalkChannel) onChatBotMessageReceived(ctx context.Context, data *chatbot.BotCallbackDataModel) ([]byte, error) { + // Extract message content from Text field + content := data.Text.Content + if content == "" { + // Try to extract from Content interface{} if Text is empty + if contentMap, ok := data.Content.(map[string]interface{}); ok { + if textContent, ok := contentMap["content"].(string); ok { + content = textContent + } + } + } + + if content == "" { + return nil, nil // Ignore empty messages + } + + senderID := data.SenderStaffId + senderNick := data.SenderNick + chatID := senderID + if data.ConversationType != "1" { + // For group chats + chatID = data.ConversationId + } + + // Store the session webhook for this chat so we can reply later + c.sessionWebhooks.Store(chatID, data.SessionWebhook) + + metadata := map[string]string{ + "sender_name": senderNick, + "conversation_id": data.ConversationId, + "conversation_type": data.ConversationType, + "platform": "dingtalk", + "session_webhook": data.SessionWebhook, + } + + log.Printf("DingTalk message from %s (%s): %s", senderNick, senderID, truncateStringDingTalk(content, 50)) + + // Handle the message through the base channel + c.HandleMessage(senderID, chatID, content, nil, metadata) + + // Return nil to indicate we've handled the message asynchronously + // The response will be sent through the message bus + return nil, nil +} + +// SendDirectReply sends a direct reply using the session webhook +func (c *DingTalkChannel) SendDirectReply(sessionWebhook, content string) error { + replier := chatbot.NewChatbotReplier() + + // Convert 
string content to []byte for the API + contentBytes := []byte(content) + titleBytes := []byte("ClawGo") + + // Send markdown formatted reply + err := replier.SimpleReplyMarkdown( + context.Background(), + sessionWebhook, + titleBytes, + contentBytes, + ) + + if err != nil { + return fmt.Errorf("failed to send reply: %w", err) + } + + return nil +} + +// truncateStringDingTalk truncates a string to max length for logging (avoiding name collision with telegram.go) +func truncateStringDingTalk(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen] +} diff --git a/pkg/channels/discord.go b/pkg/channels/discord.go new file mode 100644 index 0000000..9756106 --- /dev/null +++ b/pkg/channels/discord.go @@ -0,0 +1,246 @@ +package channels + +import ( + "context" + "fmt" + "io" + "log" + "net/http" + "os" + "path/filepath" + "strings" + "time" + + "github.com/bwmarrin/discordgo" + "gitea.kkkk.dev/DBT/clawgo/pkg/bus" + "gitea.kkkk.dev/DBT/clawgo/pkg/config" + "gitea.kkkk.dev/DBT/clawgo/pkg/logger" + "gitea.kkkk.dev/DBT/clawgo/pkg/voice" +) + +type DiscordChannel struct { + *BaseChannel + session *discordgo.Session + config config.DiscordConfig + transcriber *voice.GroqTranscriber +} + +func NewDiscordChannel(cfg config.DiscordConfig, bus *bus.MessageBus) (*DiscordChannel, error) { + session, err := discordgo.New("Bot " + cfg.Token) + if err != nil { + return nil, fmt.Errorf("failed to create discord session: %w", err) + } + + base := NewBaseChannel("discord", cfg, bus, cfg.AllowFrom) + + return &DiscordChannel{ + BaseChannel: base, + session: session, + config: cfg, + transcriber: nil, + }, nil +} + +func (c *DiscordChannel) SetTranscriber(transcriber *voice.GroqTranscriber) { + c.transcriber = transcriber +} + +func (c *DiscordChannel) Start(ctx context.Context) error { + logger.InfoC("discord", "Starting Discord bot") + + c.session.AddHandler(c.handleMessage) + + if err := c.session.Open(); err != nil { + return fmt.Errorf("failed to open 
discord session: %w", err) + } + + c.setRunning(true) + + botUser, err := c.session.User("@me") + if err != nil { + return fmt.Errorf("failed to get bot user: %w", err) + } + logger.InfoCF("discord", "Discord bot connected", map[string]interface{}{ + "username": botUser.Username, + "user_id": botUser.ID, + }) + + return nil +} + +func (c *DiscordChannel) Stop(ctx context.Context) error { + logger.InfoC("discord", "Stopping Discord bot") + c.setRunning(false) + + if err := c.session.Close(); err != nil { + return fmt.Errorf("failed to close discord session: %w", err) + } + + return nil +} + +func (c *DiscordChannel) Send(ctx context.Context, msg bus.OutboundMessage) error { + if !c.IsRunning() { + return fmt.Errorf("discord bot not running") + } + + channelID := msg.ChatID + if channelID == "" { + return fmt.Errorf("channel ID is empty") + } + + message := msg.Content + + if _, err := c.session.ChannelMessageSend(channelID, message); err != nil { + return fmt.Errorf("failed to send discord message: %w", err) + } + + return nil +} + +func (c *DiscordChannel) handleMessage(s *discordgo.Session, m *discordgo.MessageCreate) { + if m == nil || m.Author == nil { + return + } + + if m.Author.ID == s.State.User.ID { + return + } + + senderID := m.Author.ID + senderName := m.Author.Username + if m.Author.Discriminator != "" && m.Author.Discriminator != "0" { + senderName += "#" + m.Author.Discriminator + } + + content := m.Content + mediaPaths := []string{} + + for _, attachment := range m.Attachments { + isAudio := isAudioFile(attachment.Filename, attachment.ContentType) + + if isAudio { + localPath := c.downloadAttachment(attachment.URL, attachment.Filename) + if localPath != "" { + mediaPaths = append(mediaPaths, localPath) + + transcribedText := "" + if c.transcriber != nil && c.transcriber.IsAvailable() { + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + result, err := c.transcriber.Transcribe(ctx, localPath) + if err != nil 
{ + log.Printf("Voice transcription failed: %v", err) + transcribedText = fmt.Sprintf("[audio: %s (transcription failed)]", localPath) + } else { + transcribedText = fmt.Sprintf("[audio transcription: %s]", result.Text) + log.Printf("Audio transcribed successfully: %s", result.Text) + } + } else { + transcribedText = fmt.Sprintf("[audio: %s]", localPath) + } + + if content != "" { + content += "\n" + } + content += transcribedText + } else { + mediaPaths = append(mediaPaths, attachment.URL) + if content != "" { + content += "\n" + } + content += fmt.Sprintf("[attachment: %s]", attachment.URL) + } + } else { + mediaPaths = append(mediaPaths, attachment.URL) + if content != "" { + content += "\n" + } + content += fmt.Sprintf("[attachment: %s]", attachment.URL) + } + } + + if content == "" && len(mediaPaths) == 0 { + return + } + + if content == "" { + content = "[media only]" + } + + logger.DebugCF("discord", "Received message", map[string]interface{}{ + "sender_name": senderName, + "sender_id": senderID, + "preview": truncateString(content, 50), + }) + + metadata := map[string]string{ + "message_id": m.ID, + "user_id": senderID, + "username": m.Author.Username, + "display_name": senderName, + "guild_id": m.GuildID, + "channel_id": m.ChannelID, + "is_dm": fmt.Sprintf("%t", m.GuildID == ""), + } + + c.HandleMessage(senderID, m.ChannelID, content, mediaPaths, metadata) +} + +func isAudioFile(filename, contentType string) bool { + audioExtensions := []string{".mp3", ".wav", ".ogg", ".m4a", ".flac", ".aac", ".wma"} + audioTypes := []string{"audio/", "application/ogg", "application/x-ogg"} + + for _, ext := range audioExtensions { + if strings.HasSuffix(strings.ToLower(filename), ext) { + return true + } + } + + for _, audioType := range audioTypes { + if strings.HasPrefix(strings.ToLower(contentType), audioType) { + return true + } + } + + return false +} + +func (c *DiscordChannel) downloadAttachment(url, filename string) string { + mediaDir := 
filepath.Join(os.TempDir(), "clawgo_media") + if err := os.MkdirAll(mediaDir, 0755); err != nil { + log.Printf("Failed to create media directory: %v", err) + return "" + } + + localPath := filepath.Join(mediaDir, filename) + + resp, err := http.Get(url) + if err != nil { + log.Printf("Failed to download attachment: %v", err) + return "" + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + log.Printf("Failed to download attachment, status: %d", resp.StatusCode) + return "" + } + + out, err := os.Create(localPath) + if err != nil { + log.Printf("Failed to create file: %v", err) + return "" + } + defer out.Close() + + _, err = io.Copy(out, resp.Body) + if err != nil { + log.Printf("Failed to write file: %v", err) + return "" + } + + log.Printf("Attachment downloaded successfully to: %s", localPath) + return localPath +} diff --git a/pkg/channels/feishu.go b/pkg/channels/feishu.go new file mode 100644 index 0000000..6bfa4d0 --- /dev/null +++ b/pkg/channels/feishu.go @@ -0,0 +1,215 @@ +package channels + +import ( + "context" + "encoding/json" + "fmt" + "sync" + "time" + + lark "github.com/larksuite/oapi-sdk-go/v3" + larkdispatcher "github.com/larksuite/oapi-sdk-go/v3/event/dispatcher" + larkim "github.com/larksuite/oapi-sdk-go/v3/service/im/v1" + larkws "github.com/larksuite/oapi-sdk-go/v3/ws" + + "gitea.kkkk.dev/DBT/clawgo/pkg/bus" + "gitea.kkkk.dev/DBT/clawgo/pkg/config" + "gitea.kkkk.dev/DBT/clawgo/pkg/logger" +) + +type FeishuChannel struct { + *BaseChannel + config config.FeishuConfig + client *lark.Client + wsClient *larkws.Client + + mu sync.Mutex + cancel context.CancelFunc +} + +func NewFeishuChannel(cfg config.FeishuConfig, bus *bus.MessageBus) (*FeishuChannel, error) { + base := NewBaseChannel("feishu", cfg, bus, cfg.AllowFrom) + + return &FeishuChannel{ + BaseChannel: base, + config: cfg, + client: lark.NewClient(cfg.AppID, cfg.AppSecret), + }, nil +} + +func (c *FeishuChannel) Start(ctx context.Context) error { + if c.config.AppID == 
"" || c.config.AppSecret == "" { + return fmt.Errorf("feishu app_id or app_secret is empty") + } + + dispatcher := larkdispatcher.NewEventDispatcher(c.config.VerificationToken, c.config.EncryptKey). + OnP2MessageReceiveV1(c.handleMessageReceive) + + runCtx, cancel := context.WithCancel(ctx) + + c.mu.Lock() + c.cancel = cancel + c.wsClient = larkws.NewClient( + c.config.AppID, + c.config.AppSecret, + larkws.WithEventHandler(dispatcher), + ) + wsClient := c.wsClient + c.mu.Unlock() + + c.setRunning(true) + logger.InfoC("feishu", "Feishu channel started (websocket mode)") + + go func() { + if err := wsClient.Start(runCtx); err != nil { + logger.ErrorCF("feishu", "Feishu websocket stopped with error", map[string]interface{}{ + "error": err.Error(), + }) + } + }() + + return nil +} + +func (c *FeishuChannel) Stop(ctx context.Context) error { + c.mu.Lock() + if c.cancel != nil { + c.cancel() + c.cancel = nil + } + c.wsClient = nil + c.mu.Unlock() + + c.setRunning(false) + logger.InfoC("feishu", "Feishu channel stopped") + return nil +} + +func (c *FeishuChannel) Send(ctx context.Context, msg bus.OutboundMessage) error { + if !c.IsRunning() { + return fmt.Errorf("feishu channel not running") + } + + if msg.ChatID == "" { + return fmt.Errorf("chat ID is empty") + } + + payload, err := json.Marshal(map[string]string{"text": msg.Content}) + if err != nil { + return fmt.Errorf("failed to marshal feishu content: %w", err) + } + + req := larkim.NewCreateMessageReqBuilder(). + ReceiveIdType(larkim.ReceiveIdTypeChatId). + Body(larkim.NewCreateMessageReqBodyBuilder(). + ReceiveId(msg.ChatID). + MsgType(larkim.MsgTypeText). + Content(string(payload)). + Uuid(fmt.Sprintf("clawgo-%d", time.Now().UnixNano())). + Build()). 
+ Build() + + resp, err := c.client.Im.V1.Message.Create(ctx, req) + if err != nil { + return fmt.Errorf("failed to send feishu message: %w", err) + } + + if !resp.Success() { + return fmt.Errorf("feishu api error: code=%d msg=%s", resp.Code, resp.Msg) + } + + logger.DebugCF("feishu", "Feishu message sent", map[string]interface{}{ + "chat_id": msg.ChatID, + }) + + return nil +} + +func (c *FeishuChannel) handleMessageReceive(_ context.Context, event *larkim.P2MessageReceiveV1) error { + if event == nil || event.Event == nil || event.Event.Message == nil { + return nil + } + + message := event.Event.Message + sender := event.Event.Sender + + chatID := stringValue(message.ChatId) + if chatID == "" { + return nil + } + + senderID := extractFeishuSenderID(sender) + if senderID == "" { + senderID = "unknown" + } + + content := extractFeishuMessageContent(message) + if content == "" { + content = "[empty message]" + } + + metadata := map[string]string{} + if messageID := stringValue(message.MessageId); messageID != "" { + metadata["message_id"] = messageID + } + if messageType := stringValue(message.MessageType); messageType != "" { + metadata["message_type"] = messageType + } + if chatType := stringValue(message.ChatType); chatType != "" { + metadata["chat_type"] = chatType + } + if sender != nil && sender.TenantKey != nil { + metadata["tenant_key"] = *sender.TenantKey + } + + logger.InfoCF("feishu", "Feishu message received", map[string]interface{}{ + "sender_id": senderID, + "chat_id": chatID, + "preview": truncateString(content, 80), + }) + + c.HandleMessage(senderID, chatID, content, nil, metadata) + return nil +} + +func extractFeishuSenderID(sender *larkim.EventSender) string { + if sender == nil || sender.SenderId == nil { + return "" + } + + if sender.SenderId.UserId != nil && *sender.SenderId.UserId != "" { + return *sender.SenderId.UserId + } + if sender.SenderId.OpenId != nil && *sender.SenderId.OpenId != "" { + return *sender.SenderId.OpenId + } + if 
sender.SenderId.UnionId != nil && *sender.SenderId.UnionId != "" { + return *sender.SenderId.UnionId + } + + return "" +} + +func extractFeishuMessageContent(message *larkim.EventMessage) string { + if message == nil || message.Content == nil || *message.Content == "" { + return "" + } + + if message.MessageType != nil && *message.MessageType == larkim.MsgTypeText { + var textPayload struct { + Text string `json:"text"` + } + if err := json.Unmarshal([]byte(*message.Content), &textPayload); err == nil { + return textPayload.Text + } + } + + return *message.Content +} + +func stringValue(v *string) string { + if v == nil { + return "" + } + return *v +} diff --git a/pkg/channels/maixcam.go b/pkg/channels/maixcam.go new file mode 100644 index 0000000..80f367b --- /dev/null +++ b/pkg/channels/maixcam.go @@ -0,0 +1,243 @@ +package channels + +import ( + "context" + "encoding/json" + "fmt" + "net" + "sync" + + "gitea.kkkk.dev/DBT/clawgo/pkg/bus" + "gitea.kkkk.dev/DBT/clawgo/pkg/config" + "gitea.kkkk.dev/DBT/clawgo/pkg/logger" +) + +type MaixCamChannel struct { + *BaseChannel + config config.MaixCamConfig + listener net.Listener + clients map[net.Conn]bool + clientsMux sync.RWMutex + running bool +} + +type MaixCamMessage struct { + Type string `json:"type"` + Tips string `json:"tips"` + Timestamp float64 `json:"timestamp"` + Data map[string]interface{} `json:"data"` +} + +func NewMaixCamChannel(cfg config.MaixCamConfig, bus *bus.MessageBus) (*MaixCamChannel, error) { + base := NewBaseChannel("maixcam", cfg, bus, cfg.AllowFrom) + + return &MaixCamChannel{ + BaseChannel: base, + config: cfg, + clients: make(map[net.Conn]bool), + running: false, + }, nil +} + +func (c *MaixCamChannel) Start(ctx context.Context) error { + logger.InfoC("maixcam", "Starting MaixCam channel server") + + addr := fmt.Sprintf("%s:%d", c.config.Host, c.config.Port) + listener, err := net.Listen("tcp", addr) + if err != nil { + return fmt.Errorf("failed to listen on %s: %w", addr, err) + } + + 
c.listener = listener + c.setRunning(true) + + logger.InfoCF("maixcam", "MaixCam server listening", map[string]interface{}{ + "host": c.config.Host, + "port": c.config.Port, + }) + + go c.acceptConnections(ctx) + + return nil +} + +func (c *MaixCamChannel) acceptConnections(ctx context.Context) { + logger.DebugC("maixcam", "Starting connection acceptor") + + for { + select { + case <-ctx.Done(): + logger.InfoC("maixcam", "Stopping connection acceptor") + return + default: + conn, err := c.listener.Accept() + if err != nil { + if c.running { + logger.ErrorCF("maixcam", "Failed to accept connection", map[string]interface{}{ + "error": err.Error(), + }) + } + return + } + + logger.InfoCF("maixcam", "New connection from MaixCam device", map[string]interface{}{ + "remote_addr": conn.RemoteAddr().String(), + }) + + c.clientsMux.Lock() + c.clients[conn] = true + c.clientsMux.Unlock() + + go c.handleConnection(conn, ctx) + } + } +} + +func (c *MaixCamChannel) handleConnection(conn net.Conn, ctx context.Context) { + logger.DebugC("maixcam", "Handling MaixCam connection") + + defer func() { + conn.Close() + c.clientsMux.Lock() + delete(c.clients, conn) + c.clientsMux.Unlock() + logger.DebugC("maixcam", "Connection closed") + }() + + decoder := json.NewDecoder(conn) + + for { + select { + case <-ctx.Done(): + return + default: + var msg MaixCamMessage + if err := decoder.Decode(&msg); err != nil { + if err.Error() != "EOF" { + logger.ErrorCF("maixcam", "Failed to decode message", map[string]interface{}{ + "error": err.Error(), + }) + } + return + } + + c.processMessage(msg, conn) + } + } +} + +func (c *MaixCamChannel) processMessage(msg MaixCamMessage, conn net.Conn) { + switch msg.Type { + case "person_detected": + c.handlePersonDetection(msg) + case "heartbeat": + logger.DebugC("maixcam", "Received heartbeat") + case "status": + c.handleStatusUpdate(msg) + default: + logger.WarnCF("maixcam", "Unknown message type", map[string]interface{}{ + "type": msg.Type, + }) + } +} + 
+func (c *MaixCamChannel) handlePersonDetection(msg MaixCamMessage) { + logger.InfoCF("maixcam", "", map[string]interface{}{ + "timestamp": msg.Timestamp, + "data": msg.Data, + }) + + senderID := "maixcam" + chatID := "default" + + classInfo, ok := msg.Data["class_name"].(string) + if !ok { + classInfo = "person" + } + + score, _ := msg.Data["score"].(float64) + x, _ := msg.Data["x"].(float64) + y, _ := msg.Data["y"].(float64) + w, _ := msg.Data["w"].(float64) + h, _ := msg.Data["h"].(float64) + + content := fmt.Sprintf("📷 Person detected!\nClass: %s\nConfidence: %.2f%%\nPosition: (%.0f, %.0f)\nSize: %.0fx%.0f", + classInfo, score*100, x, y, w, h) + + metadata := map[string]string{ + "timestamp": fmt.Sprintf("%.0f", msg.Timestamp), + "class_id": fmt.Sprintf("%.0f", msg.Data["class_id"]), + "score": fmt.Sprintf("%.2f", score), + "x": fmt.Sprintf("%.0f", x), + "y": fmt.Sprintf("%.0f", y), + "w": fmt.Sprintf("%.0f", w), + "h": fmt.Sprintf("%.0f", h), + } + + c.HandleMessage(senderID, chatID, content, []string{}, metadata) +} + +func (c *MaixCamChannel) handleStatusUpdate(msg MaixCamMessage) { + logger.InfoCF("maixcam", "Status update from MaixCam", map[string]interface{}{ + "status": msg.Data, + }) +} + +func (c *MaixCamChannel) Stop(ctx context.Context) error { + logger.InfoC("maixcam", "Stopping MaixCam channel") + c.setRunning(false) + + if c.listener != nil { + c.listener.Close() + } + + c.clientsMux.Lock() + defer c.clientsMux.Unlock() + + for conn := range c.clients { + conn.Close() + } + c.clients = make(map[net.Conn]bool) + + logger.InfoC("maixcam", "MaixCam channel stopped") + return nil +} + +func (c *MaixCamChannel) Send(ctx context.Context, msg bus.OutboundMessage) error { + if !c.IsRunning() { + return fmt.Errorf("maixcam channel not running") + } + + c.clientsMux.RLock() + defer c.clientsMux.RUnlock() + + if len(c.clients) == 0 { + logger.WarnC("maixcam", "No MaixCam devices connected") + return fmt.Errorf("no connected MaixCam devices") + } + + response 
:= map[string]interface{}{ + "type": "command", + "timestamp": float64(0), + "message": msg.Content, + "chat_id": msg.ChatID, + } + + data, err := json.Marshal(response) + if err != nil { + return fmt.Errorf("failed to marshal response: %w", err) + } + + var sendErr error + for conn := range c.clients { + if _, err := conn.Write(data); err != nil { + logger.ErrorCF("maixcam", "Failed to send to client", map[string]interface{}{ + "client": conn.RemoteAddr().String(), + "error": err.Error(), + }) + sendErr = err + } + } + + return sendErr +} diff --git a/pkg/channels/manager.go b/pkg/channels/manager.go new file mode 100644 index 0000000..13dfe73 --- /dev/null +++ b/pkg/channels/manager.go @@ -0,0 +1,300 @@ +// ClawGo - Ultra-lightweight personal AI agent +// Inspired by and based on nanobot: https://github.com/HKUDS/nanobot +// License: MIT +// +// Copyright (c) 2026 ClawGo contributors + +package channels + +import ( + "context" + "fmt" + "sync" + + "gitea.kkkk.dev/DBT/clawgo/pkg/bus" + "gitea.kkkk.dev/DBT/clawgo/pkg/config" + "gitea.kkkk.dev/DBT/clawgo/pkg/logger" +) + +type Manager struct { + channels map[string]Channel + bus *bus.MessageBus + config *config.Config + dispatchTask *asyncTask + mu sync.RWMutex +} + +type asyncTask struct { + cancel context.CancelFunc +} + +func NewManager(cfg *config.Config, messageBus *bus.MessageBus) (*Manager, error) { + m := &Manager{ + channels: make(map[string]Channel), + bus: messageBus, + config: cfg, + } + + if err := m.initChannels(); err != nil { + return nil, err + } + + return m, nil +} + +func (m *Manager) initChannels() error { + logger.InfoC("channels", "Initializing channel manager") + + if m.config.Channels.Telegram.Enabled && m.config.Channels.Telegram.Token != "" { + logger.DebugC("channels", "Attempting to initialize Telegram channel") + telegram, err := NewTelegramChannel(m.config.Channels.Telegram, m.bus) + if err != nil { + logger.ErrorCF("channels", "Failed to initialize Telegram channel", 
map[string]interface{}{ + "error": err.Error(), + }) + } else { + m.channels["telegram"] = telegram + logger.InfoC("channels", "Telegram channel enabled successfully") + } + } + + if m.config.Channels.WhatsApp.Enabled && m.config.Channels.WhatsApp.BridgeURL != "" { + logger.DebugC("channels", "Attempting to initialize WhatsApp channel") + whatsapp, err := NewWhatsAppChannel(m.config.Channels.WhatsApp, m.bus) + if err != nil { + logger.ErrorCF("channels", "Failed to initialize WhatsApp channel", map[string]interface{}{ + "error": err.Error(), + }) + } else { + m.channels["whatsapp"] = whatsapp + logger.InfoC("channels", "WhatsApp channel enabled successfully") + } + } + + if m.config.Channels.Feishu.Enabled { + logger.DebugC("channels", "Attempting to initialize Feishu channel") + feishu, err := NewFeishuChannel(m.config.Channels.Feishu, m.bus) + if err != nil { + logger.ErrorCF("channels", "Failed to initialize Feishu channel", map[string]interface{}{ + "error": err.Error(), + }) + } else { + m.channels["feishu"] = feishu + logger.InfoC("channels", "Feishu channel enabled successfully") + } + } + + if m.config.Channels.Discord.Enabled && m.config.Channels.Discord.Token != "" { + logger.DebugC("channels", "Attempting to initialize Discord channel") + discord, err := NewDiscordChannel(m.config.Channels.Discord, m.bus) + if err != nil { + logger.ErrorCF("channels", "Failed to initialize Discord channel", map[string]interface{}{ + "error": err.Error(), + }) + } else { + m.channels["discord"] = discord + logger.InfoC("channels", "Discord channel enabled successfully") + } + } + + if m.config.Channels.MaixCam.Enabled { + logger.DebugC("channels", "Attempting to initialize MaixCam channel") + maixcam, err := NewMaixCamChannel(m.config.Channels.MaixCam, m.bus) + if err != nil { + logger.ErrorCF("channels", "Failed to initialize MaixCam channel", map[string]interface{}{ + "error": err.Error(), + }) + } else { + m.channels["maixcam"] = maixcam + logger.InfoC("channels", 
"MaixCam channel enabled successfully") + } + } + + if m.config.Channels.QQ.Enabled { + logger.DebugC("channels", "Attempting to initialize QQ channel") + qq, err := NewQQChannel(m.config.Channels.QQ, m.bus) + if err != nil { + logger.ErrorCF("channels", "Failed to initialize QQ channel", map[string]interface{}{ + "error": err.Error(), + }) + } else { + m.channels["qq"] = qq + logger.InfoC("channels", "QQ channel enabled successfully") + } + } + + if m.config.Channels.DingTalk.Enabled && m.config.Channels.DingTalk.ClientID != "" { + logger.DebugC("channels", "Attempting to initialize DingTalk channel") + dingtalk, err := NewDingTalkChannel(m.config.Channels.DingTalk, m.bus) + if err != nil { + logger.ErrorCF("channels", "Failed to initialize DingTalk channel", map[string]interface{}{ + "error": err.Error(), + }) + } else { + m.channels["dingtalk"] = dingtalk + logger.InfoC("channels", "DingTalk channel enabled successfully") + } + } + + logger.InfoCF("channels", "Channel initialization completed", map[string]interface{}{ + "enabled_channels": len(m.channels), + }) + + return nil +} + +func (m *Manager) StartAll(ctx context.Context) error { + m.mu.Lock() + defer m.mu.Unlock() + + if len(m.channels) == 0 { + logger.WarnC("channels", "No channels enabled") + return nil + } + + logger.InfoC("channels", "Starting all channels") + + dispatchCtx, cancel := context.WithCancel(ctx) + m.dispatchTask = &asyncTask{cancel: cancel} + + go m.dispatchOutbound(dispatchCtx) + + for name, channel := range m.channels { + logger.InfoCF("channels", "Starting channel", map[string]interface{}{ + "channel": name, + }) + if err := channel.Start(ctx); err != nil { + logger.ErrorCF("channels", "Failed to start channel", map[string]interface{}{ + "channel": name, + "error": err.Error(), + }) + } + } + + logger.InfoC("channels", "All channels started") + return nil +} + +func (m *Manager) StopAll(ctx context.Context) error { + m.mu.Lock() + defer m.mu.Unlock() + + logger.InfoC("channels", 
"Stopping all channels") + + if m.dispatchTask != nil { + m.dispatchTask.cancel() + m.dispatchTask = nil + } + + for name, channel := range m.channels { + logger.InfoCF("channels", "Stopping channel", map[string]interface{}{ + "channel": name, + }) + if err := channel.Stop(ctx); err != nil { + logger.ErrorCF("channels", "Error stopping channel", map[string]interface{}{ + "channel": name, + "error": err.Error(), + }) + } + } + + logger.InfoC("channels", "All channels stopped") + return nil +} + +func (m *Manager) dispatchOutbound(ctx context.Context) { + logger.InfoC("channels", "Outbound dispatcher started") + + for { + select { + case <-ctx.Done(): + logger.InfoC("channels", "Outbound dispatcher stopped") + return + default: + msg, ok := m.bus.SubscribeOutbound(ctx) + if !ok { + continue + } + + m.mu.RLock() + channel, exists := m.channels[msg.Channel] + m.mu.RUnlock() + + if !exists { + logger.WarnCF("channels", "Unknown channel for outbound message", map[string]interface{}{ + "channel": msg.Channel, + }) + continue + } + + if err := channel.Send(ctx, msg); err != nil { + logger.ErrorCF("channels", "Error sending message to channel", map[string]interface{}{ + "channel": msg.Channel, + "error": err.Error(), + }) + } + } + } +} + +func (m *Manager) GetChannel(name string) (Channel, bool) { + m.mu.RLock() + defer m.mu.RUnlock() + channel, ok := m.channels[name] + return channel, ok +} + +func (m *Manager) GetStatus() map[string]interface{} { + m.mu.RLock() + defer m.mu.RUnlock() + + status := make(map[string]interface{}) + for name, channel := range m.channels { + status[name] = map[string]interface{}{ + "enabled": true, + "running": channel.IsRunning(), + } + } + return status +} + +func (m *Manager) GetEnabledChannels() []string { + m.mu.RLock() + defer m.mu.RUnlock() + + names := make([]string, 0, len(m.channels)) + for name := range m.channels { + names = append(names, name) + } + return names +} + +func (m *Manager) RegisterChannel(name string, channel Channel) 
{ + m.mu.Lock() + defer m.mu.Unlock() + m.channels[name] = channel +} + +func (m *Manager) UnregisterChannel(name string) { + m.mu.Lock() + defer m.mu.Unlock() + delete(m.channels, name) +} + +func (m *Manager) SendToChannel(ctx context.Context, channelName, chatID, content string) error { + m.mu.RLock() + channel, exists := m.channels[channelName] + m.mu.RUnlock() + + if !exists { + return fmt.Errorf("channel %s not found", channelName) + } + + msg := bus.OutboundMessage{ + Channel: channelName, + ChatID: chatID, + Content: content, + } + + return channel.Send(ctx, msg) +} diff --git a/pkg/channels/qq.go b/pkg/channels/qq.go new file mode 100644 index 0000000..cbc88c8 --- /dev/null +++ b/pkg/channels/qq.go @@ -0,0 +1,243 @@ +package channels + +import ( + "context" + "fmt" + "sync" + "time" + + "github.com/tencent-connect/botgo" + "github.com/tencent-connect/botgo/dto" + "github.com/tencent-connect/botgo/event" + "github.com/tencent-connect/botgo/openapi" + "github.com/tencent-connect/botgo/token" + "golang.org/x/oauth2" + + "gitea.kkkk.dev/DBT/clawgo/pkg/bus" + "gitea.kkkk.dev/DBT/clawgo/pkg/config" + "gitea.kkkk.dev/DBT/clawgo/pkg/logger" +) + +type QQChannel struct { + *BaseChannel + config config.QQConfig + api openapi.OpenAPI + tokenSource oauth2.TokenSource + ctx context.Context + cancel context.CancelFunc + sessionManager botgo.SessionManager + processedIDs map[string]bool + mu sync.RWMutex +} + +func NewQQChannel(cfg config.QQConfig, messageBus *bus.MessageBus) (*QQChannel, error) { + base := NewBaseChannel("qq", cfg, messageBus, cfg.AllowFrom) + + return &QQChannel{ + BaseChannel: base, + config: cfg, + processedIDs: make(map[string]bool), + }, nil +} + +func (c *QQChannel) Start(ctx context.Context) error { + if c.config.AppID == "" || c.config.AppSecret == "" { + return fmt.Errorf("QQ app_id and app_secret not configured") + } + + logger.InfoC("qq", "Starting QQ bot (WebSocket mode)") + + // 创建 token source + credentials := &token.QQBotCredentials{ + 
AppID: c.config.AppID, + AppSecret: c.config.AppSecret, + } + c.tokenSource = token.NewQQBotTokenSource(credentials) + + // 创建子 context + c.ctx, c.cancel = context.WithCancel(ctx) + + // 启动自动刷新 token 协程 + if err := token.StartRefreshAccessToken(c.ctx, c.tokenSource); err != nil { + return fmt.Errorf("failed to start token refresh: %w", err) + } + + // 初始化 OpenAPI 客户端 + c.api = botgo.NewOpenAPI(c.config.AppID, c.tokenSource).WithTimeout(5 * time.Second) + + // 注册事件处理器 + intent := event.RegisterHandlers( + c.handleC2CMessage(), + c.handleGroupATMessage(), + ) + + // 获取 WebSocket 接入点 + wsInfo, err := c.api.WS(c.ctx, nil, "") + if err != nil { + return fmt.Errorf("failed to get websocket info: %w", err) + } + + logger.InfoCF("qq", "Got WebSocket info", map[string]interface{}{ + "shards": wsInfo.Shards, + }) + + // 创建并保存 sessionManager + c.sessionManager = botgo.NewSessionManager() + + // 在 goroutine 中启动 WebSocket 连接,避免阻塞 + go func() { + if err := c.sessionManager.Start(wsInfo, c.tokenSource, &intent); err != nil { + logger.ErrorCF("qq", "WebSocket session error", map[string]interface{}{ + "error": err.Error(), + }) + c.setRunning(false) + } + }() + + c.setRunning(true) + logger.InfoC("qq", "QQ bot started successfully") + + return nil +} + +func (c *QQChannel) Stop(ctx context.Context) error { + logger.InfoC("qq", "Stopping QQ bot") + c.setRunning(false) + + if c.cancel != nil { + c.cancel() + } + + return nil +} + +func (c *QQChannel) Send(ctx context.Context, msg bus.OutboundMessage) error { + if !c.IsRunning() { + return fmt.Errorf("QQ bot not running") + } + + // 构造消息 + msgToCreate := &dto.MessageToCreate{ + Content: msg.Content, + } + + // C2C 消息发送 + _, err := c.api.PostC2CMessage(ctx, msg.ChatID, msgToCreate) + if err != nil { + logger.ErrorCF("qq", "Failed to send C2C message", map[string]interface{}{ + "error": err.Error(), + }) + return err + } + + return nil +} + +// handleC2CMessage 处理 QQ 私聊消息 +func (c *QQChannel) handleC2CMessage() 
event.C2CMessageEventHandler { + return func(event *dto.WSPayload, data *dto.WSC2CMessageData) error { + // 去重检查 + if c.isDuplicate(data.ID) { + return nil + } + + // 提取用户信息 + var senderID string + if data.Author != nil && data.Author.ID != "" { + senderID = data.Author.ID + } else { + logger.WarnC("qq", "Received message with no sender ID") + return nil + } + + // 提取消息内容 + content := data.Content + if content == "" { + logger.DebugC("qq", "Received empty message, ignoring") + return nil + } + + logger.InfoCF("qq", "Received C2C message", map[string]interface{}{ + "sender": senderID, + "length": len(content), + }) + + // 转发到消息总线 + metadata := map[string]string{ + "message_id": data.ID, + } + + c.HandleMessage(senderID, senderID, content, []string{}, metadata) + + return nil + } +} + +// handleGroupATMessage 处理群@消息 +func (c *QQChannel) handleGroupATMessage() event.GroupATMessageEventHandler { + return func(event *dto.WSPayload, data *dto.WSGroupATMessageData) error { + // 去重检查 + if c.isDuplicate(data.ID) { + return nil + } + + // 提取用户信息 + var senderID string + if data.Author != nil && data.Author.ID != "" { + senderID = data.Author.ID + } else { + logger.WarnC("qq", "Received group message with no sender ID") + return nil + } + + // 提取消息内容(去掉 @ 机器人部分) + content := data.Content + if content == "" { + logger.DebugC("qq", "Received empty group message, ignoring") + return nil + } + + logger.InfoCF("qq", "Received group AT message", map[string]interface{}{ + "sender": senderID, + "group": data.GroupID, + "length": len(content), + }) + + // 转发到消息总线(使用 GroupID 作为 ChatID) + metadata := map[string]string{ + "message_id": data.ID, + "group_id": data.GroupID, + } + + c.HandleMessage(senderID, data.GroupID, content, []string{}, metadata) + + return nil + } +} + +// isDuplicate 检查消息是否重复 +func (c *QQChannel) isDuplicate(messageID string) bool { + c.mu.Lock() + defer c.mu.Unlock() + + if c.processedIDs[messageID] { + return true + } + + c.processedIDs[messageID] = true + + // 
简单清理:限制 map 大小 + if len(c.processedIDs) > 10000 { + // 清空一半 + count := 0 + for id := range c.processedIDs { + if count >= 5000 { + break + } + delete(c.processedIDs, id) + count++ + } + } + + return false +} diff --git a/pkg/channels/telegram.go b/pkg/channels/telegram.go new file mode 100644 index 0000000..0ee09c0 --- /dev/null +++ b/pkg/channels/telegram.go @@ -0,0 +1,475 @@ +package channels + +import ( + "context" + "fmt" + "io" + "log" + "net/http" + "os" + "path/filepath" + "regexp" + "strings" + "sync" + "time" + + "github.com/mymmrac/telego" + "github.com/mymmrac/telego/telegoutil" + + "gitea.kkkk.dev/DBT/clawgo/pkg/bus" + "gitea.kkkk.dev/DBT/clawgo/pkg/config" + "gitea.kkkk.dev/DBT/clawgo/pkg/voice" +) + +type TelegramChannel struct { + *BaseChannel + bot *telego.Bot + config config.TelegramConfig + chatIDs map[string]int64 + updates <-chan telego.Update + transcriber *voice.GroqTranscriber + placeholders sync.Map // chatID -> messageID + stopThinking sync.Map // chatID -> chan struct{} +} + +func NewTelegramChannel(cfg config.TelegramConfig, bus *bus.MessageBus) (*TelegramChannel, error) { + bot, err := telego.NewBot(cfg.Token, telego.WithDefaultLogger(false, false)) + if err != nil { + return nil, fmt.Errorf("failed to create telegram bot: %w", err) + } + + base := NewBaseChannel("telegram", cfg, bus, cfg.AllowFrom) + + return &TelegramChannel{ + BaseChannel: base, + bot: bot, + config: cfg, + chatIDs: make(map[string]int64), + transcriber: nil, + placeholders: sync.Map{}, + stopThinking: sync.Map{}, + }, nil +} + +func (c *TelegramChannel) SetTranscriber(transcriber *voice.GroqTranscriber) { + c.transcriber = transcriber +} + +func (c *TelegramChannel) Start(ctx context.Context) error { + log.Printf("Starting Telegram bot (polling mode)...") + + updates, err := c.bot.UpdatesViaLongPolling(nil) + if err != nil { + return fmt.Errorf("failed to start updates polling: %w", err) + } + c.updates = updates + + c.setRunning(true) + + botInfo, err := 
c.bot.GetMe(context.Background()) + if err != nil { + return fmt.Errorf("failed to get bot info: %w", err) + } + log.Printf("Telegram bot @%s connected", botInfo.Username) + + go func() { + for { + select { + case <-ctx.Done(): + return + case update, ok := <-updates: + if !ok { + log.Printf("Updates channel closed") + return + } + if update.Message != nil { + c.handleMessage(update.Message) + } + } + } + }() + + return nil +} + +func (c *TelegramChannel) Stop(ctx context.Context) error { + log.Println("Stopping Telegram bot...") + c.setRunning(false) + + if c.updates != nil { + c.bot.StopLongPolling() + } + + return nil +} + +func (c *TelegramChannel) Send(ctx context.Context, msg bus.OutboundMessage) error { + if !c.IsRunning() { + return fmt.Errorf("telegram bot not running") + } + + chatIDInt, err := parseChatID(msg.ChatID) + if err != nil { + return fmt.Errorf("invalid chat ID: %w", err) + } + chatID := telegoutil.ID(chatIDInt) + + // Stop thinking animation + if stop, ok := c.stopThinking.Load(msg.ChatID); ok { + close(stop.(chan struct{})) + c.stopThinking.Delete(msg.ChatID) + } + + htmlContent := markdownToTelegramHTML(msg.Content) + + // Try to edit placeholder + if pID, ok := c.placeholders.Load(msg.ChatID); ok { + c.placeholders.Delete(msg.ChatID) + + _, err := c.bot.EditMessageText(ctx, &telego.EditMessageTextParams{ + ChatID: chatID, + MessageID: pID.(int), + Text: htmlContent, + ParseMode: telego.ModeHTML, + }) + + if err == nil { + return nil + } + // Fallback to new message if edit fails + } + + _, err = c.bot.SendMessage(ctx, telegoutil.Message(chatID, htmlContent).WithParseMode(telego.ModeHTML)) + + if err != nil { + log.Printf("HTML parse failed, falling back to plain text: %v", err) + _, err = c.bot.SendMessage(ctx, telegoutil.Message(chatID, msg.Content)) + return err + } + + return nil +} + +func (c *TelegramChannel) handleMessage(message *telego.Message) { + if message == nil { + return + } + + user := message.From + if user == nil { + return 
+ } + + senderID := fmt.Sprintf("%d", user.ID) + if user.Username != "" { + senderID = fmt.Sprintf("%d|%s", user.ID, user.Username) + } + + chatID := message.Chat.ID + c.chatIDs[senderID] = chatID + + content := "" + mediaPaths := []string{} + + if message.Text != "" { + content += message.Text + } + + if message.Caption != "" { + if content != "" { + content += "\n" + } + content += message.Caption + } + + if message.Photo != nil && len(message.Photo) > 0 { + photo := message.Photo[len(message.Photo)-1] + photoPath := c.downloadFile(photo.FileID, ".jpg") + if photoPath != "" { + mediaPaths = append(mediaPaths, photoPath) + if content != "" { + content += "\n" + } + content += fmt.Sprintf("[image: %s]", photoPath) + } + } + + if message.Voice != nil { + voicePath := c.downloadFile(message.Voice.FileID, ".ogg") + if voicePath != "" { + mediaPaths = append(mediaPaths, voicePath) + + transcribedText := "" + if c.transcriber != nil && c.transcriber.IsAvailable() { + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + + result, err := c.transcriber.Transcribe(ctx, voicePath) + if err != nil { + log.Printf("Voice transcription failed: %v", err) + transcribedText = fmt.Sprintf("[voice: %s (transcription failed)]", voicePath) + } else { + transcribedText = fmt.Sprintf("[voice transcription: %s]", result.Text) + log.Printf("Voice transcribed successfully: %s", result.Text) + } + } else { + transcribedText = fmt.Sprintf("[voice: %s]", voicePath) + } + + if content != "" { + content += "\n" + } + content += transcribedText + } + } + + if message.Audio != nil { + audioPath := c.downloadFile(message.Audio.FileID, ".mp3") + if audioPath != "" { + mediaPaths = append(mediaPaths, audioPath) + if content != "" { + content += "\n" + } + content += fmt.Sprintf("[audio: %s]", audioPath) + } + } + + if message.Document != nil { + docPath := c.downloadFile(message.Document.FileID, "") + if docPath != "" { + mediaPaths = append(mediaPaths, docPath) 
+ if content != "" { + content += "\n" + } + content += fmt.Sprintf("[file: %s]", docPath) + } + } + + if content == "" { + content = "[empty message]" + } + + log.Printf("Telegram message from %s: %s...", senderID, truncateString(content, 50)) + + // Thinking indicator + _ = c.bot.SendChatAction(context.Background(), &telego.SendChatActionParams{ + ChatID: telegoutil.ID(chatID), + Action: telego.ChatActionTyping, + }) + + stopChan := make(chan struct{}) + c.stopThinking.Store(fmt.Sprintf("%d", chatID), stopChan) + + pMsg, err := c.bot.SendMessage(context.Background(), telegoutil.Message(telegoutil.ID(chatID), "Thinking... 💭")) + if err == nil { + pID := pMsg.MessageID + c.placeholders.Store(fmt.Sprintf("%d", chatID), pID) + + go func(cid int64, mid int, stop <-chan struct{}) { + dots := []string{".", "..", "..."} + emotes := []string{"💭", "🤔", "☁️"} + i := 0 + ticker := time.NewTicker(2000 * time.Millisecond) + defer ticker.Stop() + for { + select { + case <-stop: + return + case <-ticker.C: + i++ + text := fmt.Sprintf("Thinking%s %s", dots[i%len(dots)], emotes[i%len(emotes)]) + _, _ = c.bot.EditMessageText(context.Background(), &telego.EditMessageTextParams{ + ChatID: telegoutil.ID(cid), + MessageID: mid, + Text: text, + }) + } + } + }(chatID, pID, stopChan) + } + + metadata := map[string]string{ + "message_id": fmt.Sprintf("%d", message.MessageID), + "user_id": fmt.Sprintf("%d", user.ID), + "username": user.Username, + "first_name": user.FirstName, + "is_group": fmt.Sprintf("%t", message.Chat.Type != telego.ChatTypePrivate), + } + + c.HandleMessage(senderID, fmt.Sprintf("%d", chatID), content, mediaPaths, metadata) +} + +func (c *TelegramChannel) downloadFile(fileID, ext string) string { + file, err := c.bot.GetFile(context.Background(), &telego.GetFileParams{FileID: fileID}) + if err != nil { + log.Printf("Failed to get file: %v", err) + return "" + } + + if file.FilePath == "" { + return "" + } + + // In telego, we can use Link() or just build the URL + url := 
fmt.Sprintf("https://api.telegram.org/file/bot%s/%s", c.config.Token, file.FilePath) + log.Printf("File URL: %s", url) + + mediaDir := filepath.Join(os.TempDir(), "clawgo_media") + if err := os.MkdirAll(mediaDir, 0755); err != nil { + log.Printf("Failed to create media directory: %v", err) + return "" + } + + localPath := filepath.Join(mediaDir, fileID[:min(16, len(fileID))]+ext) + + if err := c.downloadFromURL(url, localPath); err != nil { + log.Printf("Failed to download file: %v", err) + return "" + } + + return localPath +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func (c *TelegramChannel) downloadFromURL(url, localPath string) error { + resp, err := http.Get(url) + if err != nil { + return fmt.Errorf("failed to download: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("download failed with status: %d", resp.StatusCode) + } + + out, err := os.Create(localPath) + if err != nil { + return fmt.Errorf("failed to create file: %w", err) + } + defer out.Close() + + _, err = io.Copy(out, resp.Body) + if err != nil { + return fmt.Errorf("failed to write file: %w", err) + } + + log.Printf("File downloaded successfully to: %s", localPath) + return nil +} + +func parseChatID(chatIDStr string) (int64, error) { + var id int64 + _, err := fmt.Sscanf(chatIDStr, "%d", &id) + return id, err +} + +func truncateString(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + return s[:maxLen] +} + +func markdownToTelegramHTML(text string) string { + if text == "" { + return "" + } + + codeBlocks := extractCodeBlocks(text) + text = codeBlocks.text + + inlineCodes := extractInlineCodes(text) + text = inlineCodes.text + + text = regexp.MustCompile(`^#{1,6}\s+(.+)$`).ReplaceAllString(text, "$1") + + text = regexp.MustCompile(`^>\s*(.*)$`).ReplaceAllString(text, "$1") + + text = escapeHTML(text) + + text = regexp.MustCompile(`\[([^\]]+)\]\(([^)]+)\)`).ReplaceAllString(text, `$1`) + + 
text = regexp.MustCompile(`\*\*(.+?)\*\*`).ReplaceAllString(text, "$1") + + text = regexp.MustCompile(`__(.+?)__`).ReplaceAllString(text, "$1") + + reItalic := regexp.MustCompile(`_([^_]+)_`) + text = reItalic.ReplaceAllStringFunc(text, func(s string) string { + match := reItalic.FindStringSubmatch(s) + if len(match) < 2 { + return s + } + return "" + match[1] + "" + }) + + text = regexp.MustCompile(`~~(.+?)~~`).ReplaceAllString(text, "$1") + + text = regexp.MustCompile(`^[-*]\s+`).ReplaceAllString(text, "• ") + + for i, code := range inlineCodes.codes { + escaped := escapeHTML(code) + text = strings.ReplaceAll(text, fmt.Sprintf("\x00IC%d\x00", i), fmt.Sprintf("%s", escaped)) + } + + for i, code := range codeBlocks.codes { + escaped := escapeHTML(code) + text = strings.ReplaceAll(text, fmt.Sprintf("\x00CB%d\x00", i), fmt.Sprintf("
%s
", escaped)) + } + + return text +} + +type codeBlockMatch struct { + text string + codes []string +} + +func extractCodeBlocks(text string) codeBlockMatch { + re := regexp.MustCompile("```[\\w]*\\n?([\\s\\S]*?)```") + matches := re.FindAllStringSubmatch(text, -1) + + codes := make([]string, 0, len(matches)) + for _, match := range matches { + codes = append(codes, match[1]) + } + + text = re.ReplaceAllStringFunc(text, func(m string) string { + return fmt.Sprintf("\x00CB%d\x00", len(codes)-1) + }) + + return codeBlockMatch{text: text, codes: codes} +} + +type inlineCodeMatch struct { + text string + codes []string +} + +func extractInlineCodes(text string) inlineCodeMatch { + re := regexp.MustCompile("`([^`]+)`") + matches := re.FindAllStringSubmatch(text, -1) + + codes := make([]string, 0, len(matches)) + for _, match := range matches { + codes = append(codes, match[1]) + } + + text = re.ReplaceAllStringFunc(text, func(m string) string { + return fmt.Sprintf("\x00IC%d\x00", len(codes)-1) + }) + + return inlineCodeMatch{text: text, codes: codes} +} + +func escapeHTML(text string) string { + text = strings.ReplaceAll(text, "&", "&") + text = strings.ReplaceAll(text, "<", "<") + text = strings.ReplaceAll(text, ">", ">") + return text +} diff --git a/pkg/channels/whatsapp.go b/pkg/channels/whatsapp.go new file mode 100644 index 0000000..07bf1c1 --- /dev/null +++ b/pkg/channels/whatsapp.go @@ -0,0 +1,183 @@ +package channels + +import ( + "context" + "encoding/json" + "fmt" + "log" + "sync" + "time" + + "github.com/gorilla/websocket" + + "gitea.kkkk.dev/DBT/clawgo/pkg/bus" + "gitea.kkkk.dev/DBT/clawgo/pkg/config" +) + +type WhatsAppChannel struct { + *BaseChannel + conn *websocket.Conn + config config.WhatsAppConfig + url string + mu sync.Mutex + connected bool +} + +func NewWhatsAppChannel(cfg config.WhatsAppConfig, bus *bus.MessageBus) (*WhatsAppChannel, error) { + base := NewBaseChannel("whatsapp", cfg, bus, cfg.AllowFrom) + + return &WhatsAppChannel{ + BaseChannel: 
base, + config: cfg, + url: cfg.BridgeURL, + connected: false, + }, nil +} + +func (c *WhatsAppChannel) Start(ctx context.Context) error { + log.Printf("Starting WhatsApp channel connecting to %s...", c.url) + + dialer := websocket.DefaultDialer + dialer.HandshakeTimeout = 10 * time.Second + + conn, _, err := dialer.Dial(c.url, nil) + if err != nil { + return fmt.Errorf("failed to connect to WhatsApp bridge: %w", err) + } + + c.mu.Lock() + c.conn = conn + c.connected = true + c.mu.Unlock() + + c.setRunning(true) + log.Println("WhatsApp channel connected") + + go c.listen(ctx) + + return nil +} + +func (c *WhatsAppChannel) Stop(ctx context.Context) error { + log.Println("Stopping WhatsApp channel...") + + c.mu.Lock() + defer c.mu.Unlock() + + if c.conn != nil { + if err := c.conn.Close(); err != nil { + log.Printf("Error closing WhatsApp connection: %v", err) + } + c.conn = nil + } + + c.connected = false + c.setRunning(false) + + return nil +} + +func (c *WhatsAppChannel) Send(ctx context.Context, msg bus.OutboundMessage) error { + c.mu.Lock() + defer c.mu.Unlock() + + if c.conn == nil { + return fmt.Errorf("whatsapp connection not established") + } + + payload := map[string]interface{}{ + "type": "message", + "to": msg.ChatID, + "content": msg.Content, + } + + data, err := json.Marshal(payload) + if err != nil { + return fmt.Errorf("failed to marshal message: %w", err) + } + + if err := c.conn.WriteMessage(websocket.TextMessage, data); err != nil { + return fmt.Errorf("failed to send message: %w", err) + } + + return nil +} + +func (c *WhatsAppChannel) listen(ctx context.Context) { + for { + select { + case <-ctx.Done(): + return + default: + c.mu.Lock() + conn := c.conn + c.mu.Unlock() + + if conn == nil { + time.Sleep(1 * time.Second) + continue + } + + _, message, err := conn.ReadMessage() + if err != nil { + log.Printf("WhatsApp read error: %v", err) + time.Sleep(2 * time.Second) + continue + } + + var msg map[string]interface{} + if err := 
json.Unmarshal(message, &msg); err != nil { + log.Printf("Failed to unmarshal WhatsApp message: %v", err) + continue + } + + msgType, ok := msg["type"].(string) + if !ok { + continue + } + + if msgType == "message" { + c.handleIncomingMessage(msg) + } + } + } +} + +func (c *WhatsAppChannel) handleIncomingMessage(msg map[string]interface{}) { + senderID, ok := msg["from"].(string) + if !ok { + return + } + + chatID, ok := msg["chat"].(string) + if !ok { + chatID = senderID + } + + content, ok := msg["content"].(string) + if !ok { + content = "" + } + + var mediaPaths []string + if mediaData, ok := msg["media"].([]interface{}); ok { + mediaPaths = make([]string, 0, len(mediaData)) + for _, m := range mediaData { + if path, ok := m.(string); ok { + mediaPaths = append(mediaPaths, path) + } + } + } + + metadata := make(map[string]string) + if messageID, ok := msg["id"].(string); ok { + metadata["message_id"] = messageID + } + if userName, ok := msg["from_name"].(string); ok { + metadata["user_name"] = userName + } + + log.Printf("WhatsApp message from %s: %s...", senderID, truncateString(content, 50)) + + c.HandleMessage(senderID, chatID, content, mediaPaths, metadata) +} diff --git a/pkg/config/config.go b/pkg/config/config.go new file mode 100644 index 0000000..e76378a --- /dev/null +++ b/pkg/config/config.go @@ -0,0 +1,331 @@ +package config + +import ( + "encoding/json" + "os" + "path/filepath" + "sync" + + "github.com/caarlos0/env/v11" +) + +type Config struct { + Agents AgentsConfig `json:"agents"` + Channels ChannelsConfig `json:"channels"` + Providers ProvidersConfig `json:"providers"` + Gateway GatewayConfig `json:"gateway"` + Tools ToolsConfig `json:"tools"` + mu sync.RWMutex +} + +type AgentsConfig struct { + Defaults AgentDefaults `json:"defaults"` +} + +type AgentDefaults struct { + Workspace string `json:"workspace" env:"CLAWGO_AGENTS_DEFAULTS_WORKSPACE"` + Model string `json:"model" env:"CLAWGO_AGENTS_DEFAULTS_MODEL"` + MaxTokens int `json:"max_tokens" 
env:"CLAWGO_AGENTS_DEFAULTS_MAX_TOKENS"` + Temperature float64 `json:"temperature" env:"CLAWGO_AGENTS_DEFAULTS_TEMPERATURE"` + MaxToolIterations int `json:"max_tool_iterations" env:"CLAWGO_AGENTS_DEFAULTS_MAX_TOOL_ITERATIONS"` +} + +type ChannelsConfig struct { + WhatsApp WhatsAppConfig `json:"whatsapp"` + Telegram TelegramConfig `json:"telegram"` + Feishu FeishuConfig `json:"feishu"` + Discord DiscordConfig `json:"discord"` + MaixCam MaixCamConfig `json:"maixcam"` + QQ QQConfig `json:"qq"` + DingTalk DingTalkConfig `json:"dingtalk"` +} + +type WhatsAppConfig struct { + Enabled bool `json:"enabled" env:"CLAWGO_CHANNELS_WHATSAPP_ENABLED"` + BridgeURL string `json:"bridge_url" env:"CLAWGO_CHANNELS_WHATSAPP_BRIDGE_URL"` + AllowFrom []string `json:"allow_from" env:"CLAWGO_CHANNELS_WHATSAPP_ALLOW_FROM"` +} + +type TelegramConfig struct { + Enabled bool `json:"enabled" env:"CLAWGO_CHANNELS_TELEGRAM_ENABLED"` + Token string `json:"token" env:"CLAWGO_CHANNELS_TELEGRAM_TOKEN"` + AllowFrom []string `json:"allow_from" env:"CLAWGO_CHANNELS_TELEGRAM_ALLOW_FROM"` +} + +type FeishuConfig struct { + Enabled bool `json:"enabled" env:"CLAWGO_CHANNELS_FEISHU_ENABLED"` + AppID string `json:"app_id" env:"CLAWGO_CHANNELS_FEISHU_APP_ID"` + AppSecret string `json:"app_secret" env:"CLAWGO_CHANNELS_FEISHU_APP_SECRET"` + EncryptKey string `json:"encrypt_key" env:"CLAWGO_CHANNELS_FEISHU_ENCRYPT_KEY"` + VerificationToken string `json:"verification_token" env:"CLAWGO_CHANNELS_FEISHU_VERIFICATION_TOKEN"` + AllowFrom []string `json:"allow_from" env:"CLAWGO_CHANNELS_FEISHU_ALLOW_FROM"` +} + +type DiscordConfig struct { + Enabled bool `json:"enabled" env:"CLAWGO_CHANNELS_DISCORD_ENABLED"` + Token string `json:"token" env:"CLAWGO_CHANNELS_DISCORD_TOKEN"` + AllowFrom []string `json:"allow_from" env:"CLAWGO_CHANNELS_DISCORD_ALLOW_FROM"` +} + +type MaixCamConfig struct { + Enabled bool `json:"enabled" env:"CLAWGO_CHANNELS_MAIXCAM_ENABLED"` + Host string `json:"host" env:"CLAWGO_CHANNELS_MAIXCAM_HOST"` 
+ Port int `json:"port" env:"CLAWGO_CHANNELS_MAIXCAM_PORT"` + AllowFrom []string `json:"allow_from" env:"CLAWGO_CHANNELS_MAIXCAM_ALLOW_FROM"` +} + +type QQConfig struct { + Enabled bool `json:"enabled" env:"CLAWGO_CHANNELS_QQ_ENABLED"` + AppID string `json:"app_id" env:"CLAWGO_CHANNELS_QQ_APP_ID"` + AppSecret string `json:"app_secret" env:"CLAWGO_CHANNELS_QQ_APP_SECRET"` + AllowFrom []string `json:"allow_from" env:"CLAWGO_CHANNELS_QQ_ALLOW_FROM"` +} + +type DingTalkConfig struct { + Enabled bool `json:"enabled" env:"CLAWGO_CHANNELS_DINGTALK_ENABLED"` + ClientID string `json:"client_id" env:"CLAWGO_CHANNELS_DINGTALK_CLIENT_ID"` + ClientSecret string `json:"client_secret" env:"CLAWGO_CHANNELS_DINGTALK_CLIENT_SECRET"` + AllowFrom []string `json:"allow_from" env:"CLAWGO_CHANNELS_DINGTALK_ALLOW_FROM"` +} + +type ProvidersConfig struct { + Anthropic ProviderConfig `json:"anthropic"` + OpenAI ProviderConfig `json:"openai"` + OpenRouter ProviderConfig `json:"openrouter"` + Groq ProviderConfig `json:"groq"` + Zhipu ProviderConfig `json:"zhipu"` + VLLM ProviderConfig `json:"vllm"` + Gemini ProviderConfig `json:"gemini"` +} + +type ProviderConfig struct { + APIKey string `json:"api_key" env:"CLAWGO_PROVIDERS_{{.Name}}_API_KEY"` + APIBase string `json:"api_base" env:"CLAWGO_PROVIDERS_{{.Name}}_API_BASE"` + Auth string `json:"auth" env:"CLAWGO_PROVIDERS_{{.Name}}_AUTH"` +} + +type GatewayConfig struct { + Host string `json:"host" env:"CLAWGO_GATEWAY_HOST"` + Port int `json:"port" env:"CLAWGO_GATEWAY_PORT"` +} + +type WebSearchConfig struct { + APIKey string `json:"api_key" env:"CLAWGO_TOOLS_WEB_SEARCH_API_KEY"` + MaxResults int `json:"max_results" env:"CLAWGO_TOOLS_WEB_SEARCH_MAX_RESULTS"` +} + +type WebToolsConfig struct { + Search WebSearchConfig `json:"search"` +} + +type ToolsConfig struct { + Web WebToolsConfig `json:"web"` +} + +var ( + isDebug bool + muDebug sync.RWMutex +) + +func SetDebugMode(debug bool) { + muDebug.Lock() + defer muDebug.Unlock() + isDebug = debug +} 
+ +func IsDebugMode() bool { + muDebug.RLock() + defer muDebug.RUnlock() + return isDebug +} + +func GetConfigDir() string { + if IsDebugMode() { + return ".clawgo" + } + home, _ := os.UserHomeDir() + return filepath.Join(home, ".clawgo") +} + +func DefaultConfig() *Config { + configDir := GetConfigDir() + return &Config{ + Agents: AgentsConfig{ + Defaults: AgentDefaults{ + Workspace: filepath.Join(configDir, "workspace"), + Model: "glm-4.7", + MaxTokens: 8192, + Temperature: 0.7, + MaxToolIterations: 20, + }, + }, + Channels: ChannelsConfig{ + WhatsApp: WhatsAppConfig{ + Enabled: false, + BridgeURL: "ws://localhost:3001", + AllowFrom: []string{}, + }, + Telegram: TelegramConfig{ + Enabled: false, + Token: "", + AllowFrom: []string{}, + }, + Feishu: FeishuConfig{ + Enabled: false, + AppID: "", + AppSecret: "", + EncryptKey: "", + VerificationToken: "", + AllowFrom: []string{}, + }, + Discord: DiscordConfig{ + Enabled: false, + Token: "", + AllowFrom: []string{}, + }, + MaixCam: MaixCamConfig{ + Enabled: false, + Host: "0.0.0.0", + Port: 18790, + AllowFrom: []string{}, + }, + QQ: QQConfig{ + Enabled: false, + AppID: "", + AppSecret: "", + AllowFrom: []string{}, + }, + DingTalk: DingTalkConfig{ + Enabled: false, + ClientID: "", + ClientSecret: "", + AllowFrom: []string{}, + }, + }, + Providers: ProvidersConfig{ + Anthropic: ProviderConfig{}, + OpenAI: ProviderConfig{}, + OpenRouter: ProviderConfig{}, + Groq: ProviderConfig{}, + Zhipu: ProviderConfig{}, + VLLM: ProviderConfig{}, + Gemini: ProviderConfig{}, + }, + Gateway: GatewayConfig{ + Host: "0.0.0.0", + Port: 18790, + }, + Tools: ToolsConfig{ + Web: WebToolsConfig{ + Search: WebSearchConfig{ + APIKey: "", + MaxResults: 5, + }, + }, + }, + } +} + +func LoadConfig(path string) (*Config, error) { + cfg := DefaultConfig() + + data, err := os.ReadFile(path) + if err != nil { + if os.IsNotExist(err) { + return cfg, nil + } + return nil, err + } + + if err := json.Unmarshal(data, cfg); err != nil { + return nil, err + } 
+ + if err := env.Parse(cfg); err != nil { + return nil, err + } + + return cfg, nil +} + +func SaveConfig(path string, cfg *Config) error { + cfg.mu.RLock() + defer cfg.mu.RUnlock() + + data, err := json.MarshalIndent(cfg, "", " ") + if err != nil { + return err + } + + dir := filepath.Dir(path) + if err := os.MkdirAll(dir, 0755); err != nil { + return err + } + + return os.WriteFile(path, data, 0644) +} + +func (c *Config) WorkspacePath() string { + c.mu.RLock() + defer c.mu.RUnlock() + return expandHome(c.Agents.Defaults.Workspace) +} + +func (c *Config) GetAPIKey() string { + c.mu.RLock() + defer c.mu.RUnlock() + if c.Providers.OpenRouter.APIKey != "" { + return c.Providers.OpenRouter.APIKey + } + if c.Providers.Anthropic.APIKey != "" { + return c.Providers.Anthropic.APIKey + } + if c.Providers.OpenAI.APIKey != "" { + return c.Providers.OpenAI.APIKey + } + if c.Providers.Gemini.APIKey != "" { + return c.Providers.Gemini.APIKey + } + if c.Providers.Zhipu.APIKey != "" { + return c.Providers.Zhipu.APIKey + } + if c.Providers.Groq.APIKey != "" { + return c.Providers.Groq.APIKey + } + if c.Providers.VLLM.APIKey != "" { + return c.Providers.VLLM.APIKey + } + return "" +} + +func (c *Config) GetAPIBase() string { + c.mu.RLock() + defer c.mu.RUnlock() + if c.Providers.OpenRouter.APIKey != "" { + if c.Providers.OpenRouter.APIBase != "" { + return c.Providers.OpenRouter.APIBase + } + return "https://openrouter.ai/api/v1" + } + if c.Providers.Zhipu.APIKey != "" { + return c.Providers.Zhipu.APIBase + } + if c.Providers.VLLM.APIKey != "" && c.Providers.VLLM.APIBase != "" { + return c.Providers.VLLM.APIBase + } + return "" +} + +func expandHome(path string) string { + if path == "" { + return path + } + if path[0] == '~' { + home, _ := os.UserHomeDir() + if len(path) > 1 && path[1] == '/' { + return home + path[1:] + } + return home + } + return path +} diff --git a/pkg/cron/service.go b/pkg/cron/service.go new file mode 100644 index 0000000..96cdb5d --- /dev/null +++ 
b/pkg/cron/service.go @@ -0,0 +1,392 @@ +package cron + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "sync" + "time" +) + +type CronSchedule struct { + Kind string `json:"kind"` + AtMS *int64 `json:"atMs,omitempty"` + EveryMS *int64 `json:"everyMs,omitempty"` + Expr string `json:"expr,omitempty"` + TZ string `json:"tz,omitempty"` +} + +type CronPayload struct { + Kind string `json:"kind"` + Message string `json:"message"` + Deliver bool `json:"deliver"` + Channel string `json:"channel,omitempty"` + To string `json:"to,omitempty"` +} + +type CronJobState struct { + NextRunAtMS *int64 `json:"nextRunAtMs,omitempty"` + LastRunAtMS *int64 `json:"lastRunAtMs,omitempty"` + LastStatus string `json:"lastStatus,omitempty"` + LastError string `json:"lastError,omitempty"` +} + +type CronJob struct { + ID string `json:"id"` + Name string `json:"name"` + Enabled bool `json:"enabled"` + Schedule CronSchedule `json:"schedule"` + Payload CronPayload `json:"payload"` + State CronJobState `json:"state"` + CreatedAtMS int64 `json:"createdAtMs"` + UpdatedAtMS int64 `json:"updatedAtMs"` + DeleteAfterRun bool `json:"deleteAfterRun"` +} + +type CronStore struct { + Version int `json:"version"` + Jobs []CronJob `json:"jobs"` +} + +type JobHandler func(job *CronJob) (string, error) + +type CronService struct { + storePath string + store *CronStore + onJob JobHandler + mu sync.RWMutex + wg sync.WaitGroup + running bool + stopChan chan struct{} +} + +func NewCronService(storePath string, onJob JobHandler) *CronService { + cs := &CronService{ + storePath: storePath, + onJob: onJob, + stopChan: make(chan struct{}), + } + cs.loadStore() + return cs +} + +func (cs *CronService) Start() error { + cs.mu.Lock() + defer cs.mu.Unlock() + + if cs.running { + return nil + } + + select { + case <-cs.stopChan: + cs.stopChan = make(chan struct{}) + default: + } + + if err := cs.loadStore(); err != nil { + return fmt.Errorf("failed to load store: %w", err) + } + + cs.recomputeNextRuns() + 
if err := cs.saveStore(); err != nil { + return fmt.Errorf("failed to save store: %w", err) + } + + cs.running = true + cs.wg.Add(1) + go cs.runLoop() + + return nil +} + +func (cs *CronService) Stop() { + cs.mu.Lock() + if !cs.running { + cs.mu.Unlock() + return + } + + cs.running = false + close(cs.stopChan) + cs.mu.Unlock() + + cs.wg.Wait() +} + +func (cs *CronService) runLoop() { + defer cs.wg.Done() + ticker := time.NewTicker(1 * time.Second) + defer ticker.Stop() + + for { + select { + case <-cs.stopChan: + return + case <-ticker.C: + cs.checkJobs() + } + } +} + +func (cs *CronService) checkJobs() { + cs.mu.RLock() + if !cs.running { + cs.mu.RUnlock() + return + } + + now := time.Now().UnixMilli() + var dueJobs []*CronJob + + for i := range cs.store.Jobs { + job := &cs.store.Jobs[i] + if job.Enabled && job.State.NextRunAtMS != nil && *job.State.NextRunAtMS <= now { + dueJobs = append(dueJobs, job) + } + } + cs.mu.RUnlock() + + for _, job := range dueJobs { + cs.executeJob(job) + } + + cs.mu.Lock() + defer cs.mu.Unlock() + cs.saveStore() +} + +func (cs *CronService) executeJob(job *CronJob) { + startTime := time.Now().UnixMilli() + + var err error + if cs.onJob != nil { + _, err = cs.onJob(job) + } + + cs.mu.Lock() + defer cs.mu.Unlock() + + job.State.LastRunAtMS = &startTime + job.UpdatedAtMS = time.Now().UnixMilli() + + if err != nil { + job.State.LastStatus = "error" + job.State.LastError = err.Error() + } else { + job.State.LastStatus = "ok" + job.State.LastError = "" + } + + if job.Schedule.Kind == "at" { + if job.DeleteAfterRun { + cs.removeJobUnsafe(job.ID) + } else { + job.Enabled = false + job.State.NextRunAtMS = nil + } + } else { + nextRun := cs.computeNextRun(&job.Schedule, time.Now().UnixMilli()) + job.State.NextRunAtMS = nextRun + } +} + +func (cs *CronService) computeNextRun(schedule *CronSchedule, nowMS int64) *int64 { + if schedule.Kind == "at" { + if schedule.AtMS != nil && *schedule.AtMS > nowMS { + return schedule.AtMS + } + return nil + } 
+ + if schedule.Kind == "every" { + if schedule.EveryMS == nil || *schedule.EveryMS <= 0 { + return nil + } + next := nowMS + *schedule.EveryMS + return &next + } + + return nil +} + +func (cs *CronService) recomputeNextRuns() { + now := time.Now().UnixMilli() + for i := range cs.store.Jobs { + job := &cs.store.Jobs[i] + if job.Enabled { + job.State.NextRunAtMS = cs.computeNextRun(&job.Schedule, now) + } + } +} + +func (cs *CronService) getNextWakeMS() *int64 { + var nextWake *int64 + for _, job := range cs.store.Jobs { + if job.Enabled && job.State.NextRunAtMS != nil { + if nextWake == nil || *job.State.NextRunAtMS < *nextWake { + nextWake = job.State.NextRunAtMS + } + } + } + return nextWake +} + +func (cs *CronService) Load() error { + return cs.loadStore() +} + +func (cs *CronService) loadStore() error { + cs.store = &CronStore{ + Version: 1, + Jobs: []CronJob{}, + } + + data, err := os.ReadFile(cs.storePath) + if err != nil { + if os.IsNotExist(err) { + return nil + } + return err + } + + return json.Unmarshal(data, cs.store) +} + +func (cs *CronService) saveStore() error { + dir := filepath.Dir(cs.storePath) + if err := os.MkdirAll(dir, 0755); err != nil { + return err + } + + data, err := json.MarshalIndent(cs.store, "", " ") + if err != nil { + return err + } + + return os.WriteFile(cs.storePath, data, 0644) +} + +func (cs *CronService) AddJob(name string, schedule CronSchedule, message string, deliver bool, channel, to string) (*CronJob, error) { + cs.mu.Lock() + defer cs.mu.Unlock() + + now := time.Now().UnixMilli() + + job := CronJob{ + ID: generateID(), + Name: name, + Enabled: true, + Schedule: schedule, + Payload: CronPayload{ + Kind: "agent_turn", + Message: message, + Deliver: deliver, + Channel: channel, + To: to, + }, + State: CronJobState{ + NextRunAtMS: cs.computeNextRun(&schedule, now), + }, + CreatedAtMS: now, + UpdatedAtMS: now, + DeleteAfterRun: false, + } + + cs.store.Jobs = append(cs.store.Jobs, job) + if err := cs.saveStore(); err != nil 
{ + return nil, err + } + + return &job, nil +} + +func (cs *CronService) RemoveJob(jobID string) bool { + cs.mu.Lock() + defer cs.mu.Unlock() + + return cs.removeJobUnsafe(jobID) +} + +func (cs *CronService) removeJobUnsafe(jobID string) bool { + before := len(cs.store.Jobs) + var jobs []CronJob + for _, job := range cs.store.Jobs { + if job.ID != jobID { + jobs = append(jobs, job) + } + } + cs.store.Jobs = jobs + removed := len(cs.store.Jobs) < before + + if removed { + cs.saveStore() + } + + return removed +} + +func (cs *CronService) EnableJob(jobID string, enabled bool) *CronJob { + cs.mu.Lock() + defer cs.mu.Unlock() + + for i := range cs.store.Jobs { + job := &cs.store.Jobs[i] + if job.ID == jobID { + job.Enabled = enabled + job.UpdatedAtMS = time.Now().UnixMilli() + + if enabled { + job.State.NextRunAtMS = cs.computeNextRun(&job.Schedule, time.Now().UnixMilli()) + } else { + job.State.NextRunAtMS = nil + } + + cs.saveStore() + return job + } + } + + return nil +} + +func (cs *CronService) ListJobs(includeDisabled bool) []CronJob { + cs.mu.RLock() + defer cs.mu.RUnlock() + + if includeDisabled { + return cs.store.Jobs + } + + var enabled []CronJob + for _, job := range cs.store.Jobs { + if job.Enabled { + enabled = append(enabled, job) + } + } + + return enabled +} + +func (cs *CronService) Status() map[string]interface{} { + cs.mu.RLock() + defer cs.mu.RUnlock() + + var enabledCount int + for _, job := range cs.store.Jobs { + if job.Enabled { + enabledCount++ + } + } + + return map[string]interface{}{ + "enabled": cs.running, + "jobs": len(cs.store.Jobs), + "nextWakeAtMS": cs.getNextWakeMS(), + } +} + +func generateID() string { + return fmt.Sprintf("%d", time.Now().UnixNano()) +} diff --git a/pkg/heartbeat/service.go b/pkg/heartbeat/service.go new file mode 100644 index 0000000..fb0cdd5 --- /dev/null +++ b/pkg/heartbeat/service.go @@ -0,0 +1,140 @@ +package heartbeat + +import ( + "fmt" + "os" + "path/filepath" + "sync" + "time" +) + +type HeartbeatService 
struct { + workspace string + onHeartbeat func(string) (string, error) + interval time.Duration + enabled bool + mu sync.RWMutex + wg sync.WaitGroup + runningFlag bool + stopChan chan struct{} +} + +func NewHeartbeatService(workspace string, onHeartbeat func(string) (string, error), intervalS int, enabled bool) *HeartbeatService { + return &HeartbeatService{ + workspace: workspace, + onHeartbeat: onHeartbeat, + interval: time.Duration(intervalS) * time.Second, + enabled: enabled, + stopChan: make(chan struct{}), + } +} + +func (hs *HeartbeatService) Start() error { + hs.mu.Lock() + defer hs.mu.Unlock() + + if hs.runningFlag { + return nil + } + + if !hs.enabled { + return fmt.Errorf("heartbeat service is disabled") + } + + hs.stopChan = make(chan struct{}) + hs.runningFlag = true + hs.wg.Add(1) + go hs.runLoop() + + return nil +} + +func (hs *HeartbeatService) Stop() { + hs.mu.Lock() + if !hs.runningFlag { + hs.mu.Unlock() + return + } + + hs.runningFlag = false + close(hs.stopChan) + hs.mu.Unlock() + + hs.wg.Wait() +} + +func (hs *HeartbeatService) running() bool { + hs.mu.RLock() + defer hs.mu.RUnlock() + return hs.runningFlag +} + +func (hs *HeartbeatService) runLoop() { + defer hs.wg.Done() + ticker := time.NewTicker(hs.interval) + defer ticker.Stop() + + for { + select { + case <-hs.stopChan: + return + case <-ticker.C: + hs.checkHeartbeat() + } + } +} + +func (hs *HeartbeatService) checkHeartbeat() { + hs.mu.RLock() + if !hs.enabled || !hs.running() { + hs.mu.RUnlock() + return + } + hs.mu.RUnlock() + + prompt := hs.buildPrompt() + + if hs.onHeartbeat != nil { + _, err := hs.onHeartbeat(prompt) + if err != nil { + hs.log(fmt.Sprintf("Heartbeat error: %v", err)) + } + } +} + +func (hs *HeartbeatService) buildPrompt() string { + notesDir := filepath.Join(hs.workspace, "memory") + notesFile := filepath.Join(notesDir, "HEARTBEAT.md") + + var notes string + if data, err := os.ReadFile(notesFile); err == nil { + notes = string(data) + } + + now := 
time.Now().Format("2006-01-02 15:04") + + prompt := fmt.Sprintf(`# Heartbeat Check + +Current time: %s + +Check if there are any tasks I should be aware of or actions I should take. +Review the memory file for any important updates or changes. +Be proactive in identifying potential issues or improvements. + +%s +`, now, notes) + + return prompt +} + +func (hs *HeartbeatService) log(message string) { + logFile := filepath.Join(hs.workspace, "memory", "heartbeat.log") + f, err := os.OpenFile(logFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return + } + defer f.Close() + + timestamp := time.Now().Format("2006-01-02 15:04:05") + f.WriteString(fmt.Sprintf("[%s] %s\n", timestamp, message)) +} diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go new file mode 100644 index 0000000..22f6682 --- /dev/null +++ b/pkg/logger/logger.go @@ -0,0 +1,239 @@ +package logger + +import ( + "encoding/json" + "fmt" + "log" + "os" + "runtime" + "strings" + "sync" + "time" +) + +type LogLevel int + +const ( + DEBUG LogLevel = iota + INFO + WARN + ERROR + FATAL +) + +var ( + logLevelNames = map[LogLevel]string{ + DEBUG: "DEBUG", + INFO: "INFO", + WARN: "WARN", + ERROR: "ERROR", + FATAL: "FATAL", + } + + currentLevel = INFO + logger *Logger + once sync.Once + mu sync.RWMutex +) + +type Logger struct { + file *os.File +} + +type LogEntry struct { + Level string `json:"level"` + Timestamp string `json:"timestamp"` + Component string `json:"component,omitempty"` + Message string `json:"message"` + Fields map[string]interface{} `json:"fields,omitempty"` + Caller string `json:"caller,omitempty"` +} + +func init() { + once.Do(func() { + logger = &Logger{} + }) +} + +func SetLevel(level LogLevel) { + mu.Lock() + defer mu.Unlock() + currentLevel = level +} + +func GetLevel() LogLevel { + mu.RLock() + defer mu.RUnlock() + return currentLevel +} + +func EnableFileLogging(filePath string) error { + mu.Lock() + defer mu.Unlock() + + file, err := os.OpenFile(filePath, 
os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644) + if err != nil { + return fmt.Errorf("failed to open log file: %w", err) + } + + if logger.file != nil { + logger.file.Close() + } + + logger.file = file + log.Println("File logging enabled:", filePath) + return nil +} + +func DisableFileLogging() { + mu.Lock() + defer mu.Unlock() + + if logger.file != nil { + logger.file.Close() + logger.file = nil + log.Println("File logging disabled") + } +} + +func logMessage(level LogLevel, component string, message string, fields map[string]interface{}) { + if level < currentLevel { + return + } + + entry := LogEntry{ + Level: logLevelNames[level], + Timestamp: time.Now().UTC().Format(time.RFC3339), + Component: component, + Message: message, + Fields: fields, + } + + if pc, file, line, ok := runtime.Caller(2); ok { + fn := runtime.FuncForPC(pc) + if fn != nil { + entry.Caller = fmt.Sprintf("%s:%d (%s)", file, line, fn.Name()) + } + } + + if logger.file != nil { + jsonData, err := json.Marshal(entry) + if err == nil { + logger.file.WriteString(string(jsonData) + "\n") + } + } + + var fieldStr string + if len(fields) > 0 { + fieldStr = " " + formatFields(fields) + } + + logLine := fmt.Sprintf("[%s] [%s]%s %s%s", + entry.Timestamp, + logLevelNames[level], + formatComponent(component), + message, + fieldStr, + ) + + log.Println(logLine) + + if level == FATAL { + os.Exit(1) + } +} + +func formatComponent(component string) string { + if component == "" { + return "" + } + return fmt.Sprintf(" %s:", component) +} + +func formatFields(fields map[string]interface{}) string { + var parts []string + for k, v := range fields { + parts = append(parts, fmt.Sprintf("%s=%v", k, v)) + } + return fmt.Sprintf("{%s}", strings.Join(parts, ", ")) +} + +func Debug(message string) { + logMessage(DEBUG, "", message, nil) +} + +func DebugC(component string, message string) { + logMessage(DEBUG, component, message, nil) +} + +func DebugF(message string, fields map[string]interface{}) { + logMessage(DEBUG, 
"", message, fields) +} + +func DebugCF(component string, message string, fields map[string]interface{}) { + logMessage(DEBUG, component, message, fields) +} + +func Info(message string) { + logMessage(INFO, "", message, nil) +} + +func InfoC(component string, message string) { + logMessage(INFO, component, message, nil) +} + +func InfoF(message string, fields map[string]interface{}) { + logMessage(INFO, "", message, fields) +} + +func InfoCF(component string, message string, fields map[string]interface{}) { + logMessage(INFO, component, message, fields) +} + +func Warn(message string) { + logMessage(WARN, "", message, nil) +} + +func WarnC(component string, message string) { + logMessage(WARN, component, message, nil) +} + +func WarnF(message string, fields map[string]interface{}) { + logMessage(WARN, "", message, fields) +} + +func WarnCF(component string, message string, fields map[string]interface{}) { + logMessage(WARN, component, message, fields) +} + +func Error(message string) { + logMessage(ERROR, "", message, nil) +} + +func ErrorC(component string, message string) { + logMessage(ERROR, component, message, nil) +} + +func ErrorF(message string, fields map[string]interface{}) { + logMessage(ERROR, "", message, fields) +} + +func ErrorCF(component string, message string, fields map[string]interface{}) { + logMessage(ERROR, component, message, fields) +} + +func Fatal(message string) { + logMessage(FATAL, "", message, nil) +} + +func FatalC(component string, message string) { + logMessage(FATAL, component, message, nil) +} + +func FatalF(message string, fields map[string]interface{}) { + logMessage(FATAL, "", message, fields) +} + +func FatalCF(component string, message string, fields map[string]interface{}) { + logMessage(FATAL, component, message, fields) +} diff --git a/pkg/logger/logger_test.go b/pkg/logger/logger_test.go new file mode 100644 index 0000000..9b9c968 --- /dev/null +++ b/pkg/logger/logger_test.go @@ -0,0 +1,139 @@ +package logger + +import 
( + "testing" +) + +func TestLogLevelFiltering(t *testing.T) { + initialLevel := GetLevel() + defer SetLevel(initialLevel) + + SetLevel(WARN) + + tests := []struct { + name string + level LogLevel + shouldLog bool + }{ + {"DEBUG message", DEBUG, false}, + {"INFO message", INFO, false}, + {"WARN message", WARN, true}, + {"ERROR message", ERROR, true}, + {"FATAL message", FATAL, true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + switch tt.level { + case DEBUG: + Debug(tt.name) + case INFO: + Info(tt.name) + case WARN: + Warn(tt.name) + case ERROR: + Error(tt.name) + case FATAL: + if tt.shouldLog { + t.Logf("FATAL test skipped to prevent program exit") + } + } + }) + } + + SetLevel(INFO) +} + +func TestLoggerWithComponent(t *testing.T) { + initialLevel := GetLevel() + defer SetLevel(initialLevel) + + SetLevel(DEBUG) + + tests := []struct { + name string + component string + message string + fields map[string]interface{} + }{ + {"Simple message", "test", "Hello, world!", nil}, + {"Message with component", "discord", "Discord message", nil}, + {"Message with fields", "telegram", "Telegram message", map[string]interface{}{ + "user_id": "12345", + "count": 42, + }}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + switch { + case tt.fields == nil && tt.component != "": + InfoC(tt.component, tt.message) + case tt.fields != nil: + InfoF(tt.message, tt.fields) + default: + Info(tt.message) + } + }) + } + + SetLevel(INFO) +} + +func TestLogLevels(t *testing.T) { + tests := []struct { + name string + level LogLevel + want string + }{ + {"DEBUG level", DEBUG, "DEBUG"}, + {"INFO level", INFO, "INFO"}, + {"WARN level", WARN, "WARN"}, + {"ERROR level", ERROR, "ERROR"}, + {"FATAL level", FATAL, "FATAL"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if logLevelNames[tt.level] != tt.want { + t.Errorf("logLevelNames[%d] = %s, want %s", tt.level, logLevelNames[tt.level], tt.want) + } + }) + } +} + 
+func TestSetGetLevel(t *testing.T) { + initialLevel := GetLevel() + defer SetLevel(initialLevel) + + tests := []LogLevel{DEBUG, INFO, WARN, ERROR, FATAL} + + for _, level := range tests { + SetLevel(level) + if GetLevel() != level { + t.Errorf("SetLevel(%v) -> GetLevel() = %v, want %v", level, GetLevel(), level) + } + } +} + +func TestLoggerHelperFunctions(t *testing.T) { + initialLevel := GetLevel() + defer SetLevel(initialLevel) + + SetLevel(INFO) + + Debug("This should not log") + Info("This should log") + Warn("This should log") + Error("This should log") + + InfoC("test", "Component message") + InfoF("Fields message", map[string]interface{}{"key": "value"}) + + WarnC("test", "Warning with component") + ErrorF("Error with fields", map[string]interface{}{"error": "test"}) + + SetLevel(DEBUG) + DebugC("test", "Debug with component") + WarnF("Warning with fields", map[string]interface{}{"key": "value"}) +} diff --git a/pkg/providers/http_provider.go b/pkg/providers/http_provider.go new file mode 100644 index 0000000..7968d83 --- /dev/null +++ b/pkg/providers/http_provider.go @@ -0,0 +1,262 @@ +// ClawGo - Ultra-lightweight personal AI agent +// Inspired by and based on nanobot: https://github.com/HKUDS/nanobot +// License: MIT +// +// Copyright (c) 2026 ClawGo contributors + +package providers + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + "gitea.kkkk.dev/DBT/clawgo/pkg/config" +) + +type HTTPProvider struct { + apiKey string + apiBase string + authMode string + httpClient *http.Client +} + +func NewHTTPProvider(apiKey, apiBase, authMode string) *HTTPProvider { + return &HTTPProvider{ + apiKey: apiKey, + apiBase: apiBase, + authMode: authMode, + httpClient: &http.Client{ + Timeout: 0, + }, + } +} + +func (p *HTTPProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error) { + if p.apiBase == "" { + return nil, fmt.Errorf("API 
base not configured") + } + + requestBody := map[string]interface{}{ + "model": model, + "messages": messages, + } + + if len(tools) > 0 { + requestBody["tools"] = tools + requestBody["tool_choice"] = "auto" + } + + if maxTokens, ok := options["max_tokens"].(int); ok { + requestBody["max_tokens"] = maxTokens + } + + if temperature, ok := options["temperature"].(float64); ok { + requestBody["temperature"] = temperature + } + + jsonData, err := json.Marshal(requestBody) + if err != nil { + return nil, fmt.Errorf("failed to marshal request: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, "POST", p.apiBase+"/chat/completions", bytes.NewReader(jsonData)) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("Content-Type", "application/json") + if p.apiKey != "" { + if p.authMode == "oauth" { + req.Header.Set("Authorization", "Bearer "+p.apiKey) + } else if strings.Contains(p.apiBase, "googleapis.com") { + // Gemini direct API uses x-goog-api-key header or key query param + req.Header.Set("x-goog-api-key", p.apiKey) + } else { + authHeader := "Bearer " + p.apiKey + req.Header.Set("Authorization", authHeader) + } + } + + resp, err := p.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to send request: %w", err) + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response: %w", err) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("API error: %s", string(body)) + } + + return p.parseResponse(body) +} + +func (p *HTTPProvider) parseResponse(body []byte) (*LLMResponse, error) { + var apiResponse struct { + Choices []struct { + Message struct { + Content string `json:"content"` + ToolCalls []struct { + ID string `json:"id"` + Type string `json:"type"` + Function *struct { + Name string `json:"name"` + Arguments string `json:"arguments"` + } `json:"function"` + } `json:"tool_calls"` + } 
`json:"message"` + FinishReason string `json:"finish_reason"` + } `json:"choices"` + Usage *UsageInfo `json:"usage"` + } + + if err := json.Unmarshal(body, &apiResponse); err != nil { + return nil, fmt.Errorf("failed to unmarshal response: %w", err) + } + + if len(apiResponse.Choices) == 0 { + return &LLMResponse{ + Content: "", + FinishReason: "stop", + }, nil + } + + choice := apiResponse.Choices[0] + + toolCalls := make([]ToolCall, 0, len(choice.Message.ToolCalls)) + for _, tc := range choice.Message.ToolCalls { + arguments := make(map[string]interface{}) + name := "" + + // Handle OpenAI format with nested function object + if tc.Type == "function" && tc.Function != nil { + name = tc.Function.Name + if tc.Function.Arguments != "" { + if err := json.Unmarshal([]byte(tc.Function.Arguments), &arguments); err != nil { + arguments["raw"] = tc.Function.Arguments + } + } + } else if tc.Function != nil { + // Legacy format without type field + name = tc.Function.Name + if tc.Function.Arguments != "" { + if err := json.Unmarshal([]byte(tc.Function.Arguments), &arguments); err != nil { + arguments["raw"] = tc.Function.Arguments + } + } + } + + toolCalls = append(toolCalls, ToolCall{ + ID: tc.ID, + Name: name, + Arguments: arguments, + }) + } + + return &LLMResponse{ + Content: choice.Message.Content, + ToolCalls: toolCalls, + FinishReason: choice.FinishReason, + Usage: apiResponse.Usage, + }, nil +} + +func (p *HTTPProvider) GetDefaultModel() string { + return "" +} + +func CreateProvider(cfg *config.Config) (LLMProvider, error) { + model := cfg.Agents.Defaults.Model + + var apiKey, apiBase, authMode string + + lowerModel := strings.ToLower(model) + + switch { + case strings.HasPrefix(model, "openrouter/") || strings.HasPrefix(model, "anthropic/") || strings.HasPrefix(model, "openai/") || strings.HasPrefix(model, "meta-llama/") || strings.HasPrefix(model, "deepseek/") || strings.HasPrefix(model, "google/"): + apiKey = cfg.Providers.OpenRouter.APIKey + authMode = 
cfg.Providers.OpenRouter.Auth + if cfg.Providers.OpenRouter.APIBase != "" { + apiBase = cfg.Providers.OpenRouter.APIBase + } else { + apiBase = "https://openrouter.ai/api/v1" + } + + case strings.Contains(lowerModel, "claude") || strings.HasPrefix(model, "anthropic/"): + apiKey = cfg.Providers.Anthropic.APIKey + authMode = cfg.Providers.Anthropic.Auth + apiBase = cfg.Providers.Anthropic.APIBase + if apiBase == "" { + apiBase = "https://api.anthropic.com/v1" + } + + case strings.Contains(lowerModel, "gpt") || strings.HasPrefix(model, "openai/") || strings.Contains(lowerModel, "codex"): + apiKey = cfg.Providers.OpenAI.APIKey + authMode = cfg.Providers.OpenAI.Auth + apiBase = cfg.Providers.OpenAI.APIBase + if apiBase == "" { + apiBase = "https://api.openai.com/v1" + } + + case strings.Contains(lowerModel, "gemini") || strings.HasPrefix(model, "google/"): + apiKey = cfg.Providers.Gemini.APIKey + authMode = cfg.Providers.Gemini.Auth + apiBase = cfg.Providers.Gemini.APIBase + if apiBase == "" { + apiBase = "https://generativelanguage.googleapis.com/v1beta" + } + + case strings.Contains(lowerModel, "glm") || strings.Contains(lowerModel, "zhipu") || strings.Contains(lowerModel, "zai"): + apiKey = cfg.Providers.Zhipu.APIKey + authMode = cfg.Providers.Zhipu.Auth + apiBase = cfg.Providers.Zhipu.APIBase + if apiBase == "" { + apiBase = "https://open.bigmodel.cn/api/paas/v4" + } + + case strings.Contains(lowerModel, "groq") || strings.HasPrefix(model, "groq/"): + apiKey = cfg.Providers.Groq.APIKey + authMode = cfg.Providers.Groq.Auth + apiBase = cfg.Providers.Groq.APIBase + if apiBase == "" { + apiBase = "https://api.groq.com/openai/v1" + } + + case cfg.Providers.VLLM.APIBase != "": + apiKey = cfg.Providers.VLLM.APIKey + authMode = cfg.Providers.VLLM.Auth + apiBase = cfg.Providers.VLLM.APIBase + + default: + if cfg.Providers.OpenRouter.APIKey != "" { + apiKey = cfg.Providers.OpenRouter.APIKey + authMode = cfg.Providers.OpenRouter.Auth + if cfg.Providers.OpenRouter.APIBase != "" 
{ + apiBase = cfg.Providers.OpenRouter.APIBase + } else { + apiBase = "https://openrouter.ai/api/v1" + } + } else { + return nil, fmt.Errorf("no API key configured for model: %s", model) + } + } + + if apiKey == "" && !strings.HasPrefix(model, "bedrock/") { + return nil, fmt.Errorf("no API key configured for provider (model: %s)", model) + } + + if apiBase == "" { + return nil, fmt.Errorf("no API base configured for provider (model: %s)", model) + } + + return NewHTTPProvider(apiKey, apiBase, authMode), nil +} diff --git a/pkg/providers/types.go b/pkg/providers/types.go new file mode 100644 index 0000000..88b62e9 --- /dev/null +++ b/pkg/providers/types.go @@ -0,0 +1,52 @@ +package providers + +import "context" + +type ToolCall struct { + ID string `json:"id"` + Type string `json:"type,omitempty"` + Function *FunctionCall `json:"function,omitempty"` + Name string `json:"name,omitempty"` + Arguments map[string]interface{} `json:"arguments,omitempty"` +} + +type FunctionCall struct { + Name string `json:"name"` + Arguments string `json:"arguments"` +} + +type LLMResponse struct { + Content string `json:"content"` + ToolCalls []ToolCall `json:"tool_calls,omitempty"` + FinishReason string `json:"finish_reason"` + Usage *UsageInfo `json:"usage,omitempty"` +} + +type UsageInfo struct { + PromptTokens int `json:"prompt_tokens"` + CompletionTokens int `json:"completion_tokens"` + TotalTokens int `json:"total_tokens"` +} + +type Message struct { + Role string `json:"role"` + Content string `json:"content"` + ToolCalls []ToolCall `json:"tool_calls,omitempty"` + ToolCallID string `json:"tool_call_id,omitempty"` +} + +type LLMProvider interface { + Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error) + GetDefaultModel() string +} + +type ToolDefinition struct { + Type string `json:"type"` + Function ToolFunctionDefinition `json:"function"` +} + +type ToolFunctionDefinition struct { + Name 
string `json:"name"` + Description string `json:"description"` + Parameters map[string]interface{} `json:"parameters"` +} diff --git a/pkg/server/server.go b/pkg/server/server.go new file mode 100644 index 0000000..7906ddd --- /dev/null +++ b/pkg/server/server.go @@ -0,0 +1,69 @@ +package server + +import ( + "context" + "fmt" + "net/http" + "time" + + "gitea.kkkk.dev/DBT/clawgo/pkg/config" + "gitea.kkkk.dev/DBT/clawgo/pkg/logger" +) + +type Server struct { + server *http.Server + config *config.Config +} + +func NewServer(cfg *config.Config) *Server { + return &Server{ + config: cfg, + } +} + +func (s *Server) Start() error { + mux := http.NewServeMux() + mux.HandleFunc("/health", s.handleHealth) + mux.HandleFunc("/", s.handleRoot) + + addr := fmt.Sprintf("%s:%d", s.config.Gateway.Host, s.config.Gateway.Port) + s.server = &http.Server{ + Addr: addr, + Handler: mux, + } + + logger.InfoCF("server", "Starting HTTP server", map[string]interface{}{ + "addr": addr, + }) + + // Check/log indicating it's ready for reverse proxying (per requirement) + logger.InfoC("server", "Server ready for reverse proxying") + + go func() { + if err := s.server.ListenAndServe(); err != nil && err != http.ErrServerClosed { + logger.ErrorCF("server", "HTTP server failed", map[string]interface{}{ + "error": err.Error(), + }) + } + }() + + return nil +} + +func (s *Server) Stop(ctx context.Context) error { + if s.server != nil { + logger.InfoC("server", "Stopping HTTP server") + return s.server.Shutdown(ctx) + } + return nil +} + +func (s *Server) handleHealth(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + w.Write([]byte("OK")) +} + +func (s *Server) handleRoot(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + fmt.Fprintf(w, "ClawGo Gateway Running\nTime: %s", time.Now().Format(time.RFC3339)) +} diff --git a/pkg/session/manager.go b/pkg/session/manager.go new file mode 100644 index 0000000..ba4ec03 --- /dev/null +++ 
b/pkg/session/manager.go @@ -0,0 +1,197 @@ +package session + +import ( + "encoding/json" + "os" + "path/filepath" + "sync" + "time" + + "gitea.kkkk.dev/DBT/clawgo/pkg/providers" +) + +type Session struct { + Key string `json:"key"` + Messages []providers.Message `json:"messages"` + Summary string `json:"summary,omitempty"` + Created time.Time `json:"created"` + Updated time.Time `json:"updated"` + mu sync.RWMutex +} + +type SessionManager struct { + sessions map[string]*Session + mu sync.RWMutex + storage string +} + +func NewSessionManager(storage string) *SessionManager { + sm := &SessionManager{ + sessions: make(map[string]*Session), + storage: storage, + } + + if storage != "" { + os.MkdirAll(storage, 0755) + sm.loadSessions() + } + + return sm +} + +func (sm *SessionManager) GetOrCreate(key string) *Session { + sm.mu.RLock() + session, ok := sm.sessions[key] + sm.mu.RUnlock() + + if ok { + return session + } + + sm.mu.Lock() + defer sm.mu.Unlock() + + // Re-check existence after acquiring Write lock + if session, ok = sm.sessions[key]; ok { + return session + } + + session = &Session{ + Key: key, + Messages: []providers.Message{}, + Created: time.Now(), + Updated: time.Now(), + } + sm.sessions[key] = session + + return session +} + +func (sm *SessionManager) AddMessage(sessionKey, role, content string) { + session := sm.GetOrCreate(sessionKey) + + session.mu.Lock() + defer session.mu.Unlock() + + session.Messages = append(session.Messages, providers.Message{ + Role: role, + Content: content, + }) + session.Updated = time.Now() +} + +func (sm *SessionManager) GetHistory(key string) []providers.Message { + sm.mu.RLock() + session, ok := sm.sessions[key] + sm.mu.RUnlock() + + if !ok { + return []providers.Message{} + } + + session.mu.RLock() + defer session.mu.RUnlock() + + history := make([]providers.Message, len(session.Messages)) + copy(history, session.Messages) + return history +} + +func (sm *SessionManager) GetSummary(key string) string { + sm.mu.RLock() 
+ session, ok := sm.sessions[key] + sm.mu.RUnlock() + + if !ok { + return "" + } + + session.mu.RLock() + defer session.mu.RUnlock() + + return session.Summary +} + +func (sm *SessionManager) SetSummary(key string, summary string) { + sm.mu.RLock() + session, ok := sm.sessions[key] + sm.mu.RUnlock() + + if ok { + session.mu.Lock() + defer session.mu.Unlock() + + session.Summary = summary + session.Updated = time.Now() + } +} + +func (sm *SessionManager) TruncateHistory(key string, keepLast int) { + sm.mu.RLock() + session, ok := sm.sessions[key] + sm.mu.RUnlock() + + if !ok { + return + } + + session.mu.Lock() + defer session.mu.Unlock() + + if len(session.Messages) <= keepLast { + return + } + + session.Messages = session.Messages[len(session.Messages)-keepLast:] + session.Updated = time.Now() +} + +func (sm *SessionManager) Save(session *Session) error { + if sm.storage == "" { + return nil + } + + session.mu.RLock() + defer session.mu.RUnlock() + + sessionPath := filepath.Join(sm.storage, session.Key+".json") + + data, err := json.MarshalIndent(session, "", " ") + if err != nil { + return err + } + + return os.WriteFile(sessionPath, data, 0644) +} + +func (sm *SessionManager) loadSessions() error { + files, err := os.ReadDir(sm.storage) + if err != nil { + return err + } + + for _, file := range files { + if file.IsDir() { + continue + } + + if filepath.Ext(file.Name()) != ".json" { + continue + } + + sessionPath := filepath.Join(sm.storage, file.Name()) + data, err := os.ReadFile(sessionPath) + if err != nil { + continue + } + + var session Session + if err := json.Unmarshal(data, &session); err != nil { + continue + } + + sm.sessions[session.Key] = &session + } + + return nil +} diff --git a/pkg/skills/installer.go b/pkg/skills/installer.go new file mode 100644 index 0000000..f616cd6 --- /dev/null +++ b/pkg/skills/installer.go @@ -0,0 +1,171 @@ +package skills + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + 
"strings" + "time" +) + +type SkillInstaller struct { + workspace string +} + +type AvailableSkill struct { + Name string `json:"name"` + Repository string `json:"repository"` + Description string `json:"description"` + Author string `json:"author"` + Tags []string `json:"tags"` +} + +type BuiltinSkill struct { + Name string `json:"name"` + Path string `json:"path"` + Enabled bool `json:"enabled"` +} + +func NewSkillInstaller(workspace string) *SkillInstaller { + return &SkillInstaller{ + workspace: workspace, + } +} + +func (si *SkillInstaller) InstallFromGitHub(ctx context.Context, repo string) error { + skillDir := filepath.Join(si.workspace, "skills", filepath.Base(repo)) + + if _, err := os.Stat(skillDir); err == nil { + return fmt.Errorf("skill '%s' already exists", filepath.Base(repo)) + } + + url := fmt.Sprintf("https://raw.githubusercontent.com/%s/main/SKILL.md", repo) + + client := &http.Client{Timeout: 15 * time.Second} + req, err := http.NewRequestWithContext(ctx, "GET", url, nil) + if err != nil { + return fmt.Errorf("failed to create request: %w", err) + } + + resp, err := client.Do(req) + if err != nil { + return fmt.Errorf("failed to fetch skill: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != 200 { + return fmt.Errorf("failed to fetch skill: HTTP %d", resp.StatusCode) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to read response: %w", err) + } + + if err := os.MkdirAll(skillDir, 0755); err != nil { + return fmt.Errorf("failed to create skill directory: %w", err) + } + + skillPath := filepath.Join(skillDir, "SKILL.md") + if err := os.WriteFile(skillPath, body, 0644); err != nil { + return fmt.Errorf("failed to write skill file: %w", err) + } + + return nil +} + +func (si *SkillInstaller) Uninstall(skillName string) error { + skillDir := filepath.Join(si.workspace, "skills", skillName) + + if _, err := os.Stat(skillDir); os.IsNotExist(err) { + return fmt.Errorf("skill '%s' not found", 
skillName) + } + + if err := os.RemoveAll(skillDir); err != nil { + return fmt.Errorf("failed to remove skill: %w", err) + } + + return nil +} + +func (si *SkillInstaller) ListAvailableSkills(ctx context.Context) ([]AvailableSkill, error) { + url := "https://raw.githubusercontent.com/sipeed/clawgo-skills/main/skills.json" + + client := &http.Client{Timeout: 15 * time.Second} + req, err := http.NewRequestWithContext(ctx, "GET", url, nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to fetch skills list: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != 200 { + return nil, fmt.Errorf("failed to fetch skills list: HTTP %d", resp.StatusCode) + } + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response: %w", err) + } + + var skills []AvailableSkill + if err := json.Unmarshal(body, &skills); err != nil { + return nil, fmt.Errorf("failed to parse skills list: %w", err) + } + + return skills, nil +} + +func (si *SkillInstaller) ListBuiltinSkills() []BuiltinSkill { + builtinSkillsDir := filepath.Join(filepath.Dir(si.workspace), "clawgo", "skills") + + entries, err := os.ReadDir(builtinSkillsDir) + if err != nil { + return nil + } + + var skills []BuiltinSkill + for _, entry := range entries { + if entry.IsDir() { + _ = entry + skillName := entry.Name() + skillFile := filepath.Join(builtinSkillsDir, skillName, "SKILL.md") + + data, err := os.ReadFile(skillFile) + description := "" + if err == nil { + content := string(data) + if idx := strings.Index(content, "\n"); idx > 0 { + firstLine := content[:idx] + if strings.Contains(firstLine, "description:") { + descLine := strings.Index(content[idx:], "\n") + if descLine > 0 { + description = strings.TrimSpace(content[idx+descLine : idx+descLine]) + } + } + } + } + + // skill := BuiltinSkill{ + // Name: skillName, + // Path: description, + // 
Enabled: true, + // } + + status := "✓" + fmt.Printf(" %s %s\n", status, entry.Name()) + if description != "" { + fmt.Printf(" %s\n", description) + } + } + } + return skills +} diff --git a/pkg/skills/loader.go b/pkg/skills/loader.go new file mode 100644 index 0000000..b5436b6 --- /dev/null +++ b/pkg/skills/loader.go @@ -0,0 +1,284 @@ +package skills + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "regexp" + "strings" +) + +type SkillMetadata struct { + Name string `json:"name"` + Description string `json:"description"` +} + +type SkillInfo struct { + Name string `json:"name"` + Path string `json:"path"` + Source string `json:"source"` + Description string `json:"description"` +} + +type SkillsLoader struct { + workspace string + workspaceSkills string // workspace skills (项目级别) + globalSkills string // 全局 skills (~/.clawgo/skills) + builtinSkills string // 内置 skills +} + +func NewSkillsLoader(workspace string, globalSkills string, builtinSkills string) *SkillsLoader { + return &SkillsLoader{ + workspace: workspace, + workspaceSkills: filepath.Join(workspace, "skills"), + globalSkills: globalSkills, // ~/.clawgo/skills + builtinSkills: builtinSkills, + } +} + +func (sl *SkillsLoader) ListSkills() []SkillInfo { + skills := make([]SkillInfo, 0) + + if sl.workspaceSkills != "" { + if dirs, err := os.ReadDir(sl.workspaceSkills); err == nil { + for _, dir := range dirs { + if dir.IsDir() { + skillFile := filepath.Join(sl.workspaceSkills, dir.Name(), "SKILL.md") + if _, err := os.Stat(skillFile); err == nil { + info := SkillInfo{ + Name: dir.Name(), + Path: skillFile, + Source: "workspace", + } + metadata := sl.getSkillMetadata(skillFile) + if metadata != nil { + info.Description = metadata.Description + } + skills = append(skills, info) + } + } + } + } + } + + // 全局 skills (~/.clawgo/skills) - 被 workspace skills 覆盖 + if sl.globalSkills != "" { + if dirs, err := os.ReadDir(sl.globalSkills); err == nil { + for _, dir := range dirs { + if dir.IsDir() { + 
skillFile := filepath.Join(sl.globalSkills, dir.Name(), "SKILL.md") + if _, err := os.Stat(skillFile); err == nil { + // 检查是否已被 workspace skills 覆盖 + exists := false + for _, s := range skills { + if s.Name == dir.Name() && s.Source == "workspace" { + exists = true + break + } + } + if exists { + continue + } + + info := SkillInfo{ + Name: dir.Name(), + Path: skillFile, + Source: "global", + } + metadata := sl.getSkillMetadata(skillFile) + if metadata != nil { + info.Description = metadata.Description + } + skills = append(skills, info) + } + } + } + } + } + + if sl.builtinSkills != "" { + if dirs, err := os.ReadDir(sl.builtinSkills); err == nil { + for _, dir := range dirs { + if dir.IsDir() { + skillFile := filepath.Join(sl.builtinSkills, dir.Name(), "SKILL.md") + if _, err := os.Stat(skillFile); err == nil { + // 检查是否已被 workspace 或 global skills 覆盖 + exists := false + for _, s := range skills { + if s.Name == dir.Name() && (s.Source == "workspace" || s.Source == "global") { + exists = true + break + } + } + if exists { + continue + } + + info := SkillInfo{ + Name: dir.Name(), + Path: skillFile, + Source: "builtin", + } + metadata := sl.getSkillMetadata(skillFile) + if metadata != nil { + info.Description = metadata.Description + } + skills = append(skills, info) + } + } + } + } + } + + return skills +} + +func (sl *SkillsLoader) LoadSkill(name string) (string, bool) { + // 1. 优先从 workspace skills 加载(项目级别) + if sl.workspaceSkills != "" { + skillFile := filepath.Join(sl.workspaceSkills, name, "SKILL.md") + if content, err := os.ReadFile(skillFile); err == nil { + return sl.stripFrontmatter(string(content)), true + } + } + + // 2. 其次从全局 skills 加载 (~/.clawgo/skills) + if sl.globalSkills != "" { + skillFile := filepath.Join(sl.globalSkills, name, "SKILL.md") + if content, err := os.ReadFile(skillFile); err == nil { + return sl.stripFrontmatter(string(content)), true + } + } + + // 3. 
最后从内置 skills 加载 + if sl.builtinSkills != "" { + skillFile := filepath.Join(sl.builtinSkills, name, "SKILL.md") + if content, err := os.ReadFile(skillFile); err == nil { + return sl.stripFrontmatter(string(content)), true + } + } + + return "", false +} + +func (sl *SkillsLoader) LoadSkillsForContext(skillNames []string) string { + if len(skillNames) == 0 { + return "" + } + + var parts []string + for _, name := range skillNames { + content, ok := sl.LoadSkill(name) + if ok { + parts = append(parts, fmt.Sprintf("### Skill: %s\n\n%s", name, content)) + } + } + + return strings.Join(parts, "\n\n---\n\n") +} + +func (sl *SkillsLoader) BuildSkillsSummary() string { + allSkills := sl.ListSkills() + if len(allSkills) == 0 { + return "" + } + + var lines []string + lines = append(lines, "") + for _, s := range allSkills { + escapedName := escapeXML(s.Name) + escapedDesc := escapeXML(s.Description) + escapedPath := escapeXML(s.Path) + + lines = append(lines, fmt.Sprintf(" ")) + lines = append(lines, fmt.Sprintf(" %s", escapedName)) + lines = append(lines, fmt.Sprintf(" %s", escapedDesc)) + lines = append(lines, fmt.Sprintf(" %s", escapedPath)) + lines = append(lines, fmt.Sprintf(" %s", s.Source)) + lines = append(lines, " ") + } + lines = append(lines, "") + + return strings.Join(lines, "\n") +} + +func (sl *SkillsLoader) getSkillMetadata(skillPath string) *SkillMetadata { + content, err := os.ReadFile(skillPath) + if err != nil { + return nil + } + + frontmatter := sl.extractFrontmatter(string(content)) + if frontmatter == "" { + return &SkillMetadata{ + Name: filepath.Base(filepath.Dir(skillPath)), + } + } + + // Try JSON first (for backward compatibility) + var jsonMeta struct { + Name string `json:"name"` + Description string `json:"description"` + } + if err := json.Unmarshal([]byte(frontmatter), &jsonMeta); err == nil { + return &SkillMetadata{ + Name: jsonMeta.Name, + Description: jsonMeta.Description, + } + } + + // Fall back to simple YAML parsing + yamlMeta := 
sl.parseSimpleYAML(frontmatter) + return &SkillMetadata{ + Name: yamlMeta["name"], + Description: yamlMeta["description"], + } +} + +// parseSimpleYAML parses simple key: value YAML format +// Example: name: github\n description: "..." +func (sl *SkillsLoader) parseSimpleYAML(content string) map[string]string { + result := make(map[string]string) + + for _, line := range strings.Split(content, "\n") { + line = strings.TrimSpace(line) + if line == "" || strings.HasPrefix(line, "#") { + continue + } + + parts := strings.SplitN(line, ":", 2) + if len(parts) == 2 { + key := strings.TrimSpace(parts[0]) + value := strings.TrimSpace(parts[1]) + // Remove quotes if present + value = strings.Trim(value, "\"'") + result[key] = value + } + } + + return result +} + +func (sl *SkillsLoader) extractFrontmatter(content string) string { + // (?s) enables DOTALL mode so . matches newlines + // Match first ---, capture everything until next --- on its own line + re := regexp.MustCompile(`(?s)^---\n(.*)\n---`) + match := re.FindStringSubmatch(content) + if len(match) > 1 { + return match[1] + } + return "" +} + +func (sl *SkillsLoader) stripFrontmatter(content string) string { + re := regexp.MustCompile(`^---\n.*?\n---\n`) + return re.ReplaceAllString(content, "") +} + +func escapeXML(s string) string { + s = strings.ReplaceAll(s, "&", "&") + s = strings.ReplaceAll(s, "<", "<") + s = strings.ReplaceAll(s, ">", ">") + return s +} diff --git a/pkg/tools/base.go b/pkg/tools/base.go new file mode 100644 index 0000000..1bf53f7 --- /dev/null +++ b/pkg/tools/base.go @@ -0,0 +1,21 @@ +package tools + +import "context" + +type Tool interface { + Name() string + Description() string + Parameters() map[string]interface{} + Execute(ctx context.Context, args map[string]interface{}) (string, error) +} + +func ToolToSchema(tool Tool) map[string]interface{} { + return map[string]interface{}{ + "type": "function", + "function": map[string]interface{}{ + "name": tool.Name(), + "description": 
tool.Description(), + "parameters": tool.Parameters(), + }, + } +} diff --git a/pkg/tools/camera.go b/pkg/tools/camera.go new file mode 100644 index 0000000..b28e6e0 --- /dev/null +++ b/pkg/tools/camera.go @@ -0,0 +1,71 @@ +package tools + +import ( + "context" + "fmt" + "os" + "os/exec" + "path/filepath" + "time" +) + +type CameraTool struct { + workspace string +} + +func NewCameraTool(workspace string) *CameraTool { + return &CameraTool{ + workspace: workspace, + } +} + +func (t *CameraTool) Name() string { + return "camera_snap" +} + +func (t *CameraTool) Description() string { + return "Take a photo using the system camera (/dev/video0) and save to workspace." +} + +func (t *CameraTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "filename": map[string]interface{}{ + "type": "string", + "description": "Optional filename (default: snap_TIMESTAMP.jpg)", + }, + }, + } +} + +func (t *CameraTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + filename := "" + if v, ok := args["filename"].(string); ok && v != "" { + filename = v + } else { + filename = fmt.Sprintf("snap_%d.jpg", time.Now().Unix()) + } + + // Ensure filename is safe and within workspace + filename = filepath.Clean(filename) + if filepath.IsAbs(filename) { + return "", fmt.Errorf("filename must be relative to workspace") + } + + outputPath := filepath.Join(t.workspace, filename) + + // Check if video device exists + if _, err := os.Stat("/dev/video0"); os.IsNotExist(err) { + return "", fmt.Errorf("camera device /dev/video0 not found") + } + + // ffmpeg -y -f video4linux2 -i /dev/video0 -vframes 1 -q:v 2 output.jpg + cmd := exec.CommandContext(ctx, "ffmpeg", "-y", "-f", "video4linux2", "-i", "/dev/video0", "-vframes", "1", "-q:v", "2", outputPath) + output, err := cmd.CombinedOutput() + if err != nil { + return fmt.Sprintf("Error taking photo: %v\nOutput: %s", err, string(output)), 
nil + } + + return fmt.Sprintf("Photo saved to %s", filename), nil +} diff --git a/pkg/tools/edit.go b/pkg/tools/edit.go new file mode 100644 index 0000000..339148e --- /dev/null +++ b/pkg/tools/edit.go @@ -0,0 +1,176 @@ +package tools + +import ( + "context" + "fmt" + "os" + "path/filepath" + "strings" +) + +// EditFileTool edits a file by replacing old_text with new_text. +// The old_text must exist exactly in the file. +type EditFileTool struct { + allowedDir string // Optional directory restriction for security +} + +// NewEditFileTool creates a new EditFileTool with optional directory restriction. +func NewEditFileTool(allowedDir string) *EditFileTool { + return &EditFileTool{ + allowedDir: allowedDir, + } +} + +func (t *EditFileTool) Name() string { + return "edit_file" +} + +func (t *EditFileTool) Description() string { + return "Edit a file by replacing old_text with new_text. The old_text must exist exactly in the file." +} + +func (t *EditFileTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{ + "type": "string", + "description": "The file path to edit", + }, + "old_text": map[string]interface{}{ + "type": "string", + "description": "The exact text to find and replace", + }, + "new_text": map[string]interface{}{ + "type": "string", + "description": "The text to replace with", + }, + }, + "required": []string{"path", "old_text", "new_text"}, + } +} + +func (t *EditFileTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + path, ok := args["path"].(string) + if !ok { + return "", fmt.Errorf("path is required") + } + + oldText, ok := args["old_text"].(string) + if !ok { + return "", fmt.Errorf("old_text is required") + } + + newText, ok := args["new_text"].(string) + if !ok { + return "", fmt.Errorf("new_text is required") + } + + // Resolve path and enforce directory restriction if configured + resolvedPath := 
path + if filepath.IsAbs(path) { + resolvedPath = filepath.Clean(path) + } else { + abs, err := filepath.Abs(path) + if err != nil { + return "", fmt.Errorf("failed to resolve path: %w", err) + } + resolvedPath = abs + } + + // Check directory restriction + if t.allowedDir != "" { + allowedAbs, err := filepath.Abs(t.allowedDir) + if err != nil { + return "", fmt.Errorf("failed to resolve allowed directory: %w", err) + } + if !strings.HasPrefix(resolvedPath, allowedAbs) { + return "", fmt.Errorf("path %s is outside allowed directory %s", path, t.allowedDir) + } + } + + if _, err := os.Stat(resolvedPath); os.IsNotExist(err) { + return "", fmt.Errorf("file not found: %s", path) + } + + content, err := os.ReadFile(resolvedPath) + if err != nil { + return "", fmt.Errorf("failed to read file: %w", err) + } + + contentStr := string(content) + + if !strings.Contains(contentStr, oldText) { + return "", fmt.Errorf("old_text not found in file. Make sure it matches exactly") + } + + count := strings.Count(contentStr, oldText) + if count > 1 { + return "", fmt.Errorf("old_text appears %d times. 
Please provide more context to make it unique", count) + } + + newContent := strings.Replace(contentStr, oldText, newText, 1) + + if err := os.WriteFile(resolvedPath, []byte(newContent), 0644); err != nil { + return "", fmt.Errorf("failed to write file: %w", err) + } + + return fmt.Sprintf("Successfully edited %s", path), nil +} + +type AppendFileTool struct{} + +func NewAppendFileTool() *AppendFileTool { + return &AppendFileTool{} +} + +func (t *AppendFileTool) Name() string { + return "append_file" +} + +func (t *AppendFileTool) Description() string { + return "Append content to the end of a file" +} + +func (t *AppendFileTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{ + "type": "string", + "description": "The file path to append to", + }, + "content": map[string]interface{}{ + "type": "string", + "description": "The content to append", + }, + }, + "required": []string{"path", "content"}, + } +} + +func (t *AppendFileTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + path, ok := args["path"].(string) + if !ok { + return "", fmt.Errorf("path is required") + } + + content, ok := args["content"].(string) + if !ok { + return "", fmt.Errorf("content is required") + } + + filePath := filepath.Clean(path) + + f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + return "", fmt.Errorf("failed to open file: %w", err) + } + defer f.Close() + + if _, err := f.WriteString(content); err != nil { + return "", fmt.Errorf("failed to append to file: %w", err) + } + + return fmt.Sprintf("Successfully appended to %s", path), nil +} diff --git a/pkg/tools/filesystem.go b/pkg/tools/filesystem.go new file mode 100644 index 0000000..49d0a1a --- /dev/null +++ b/pkg/tools/filesystem.go @@ -0,0 +1,247 @@ +package tools + +import ( + "context" + "fmt" + "io" + "os" + "path/filepath" + "sort" + 
"strings" +) + +type ReadFileTool struct{} + +func (t *ReadFileTool) Name() string { + return "read_file" +} + +func (t *ReadFileTool) Description() string { + return "Read the contents of a file" +} + +func (t *ReadFileTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{ + "type": "string", + "description": "Path to the file to read", + }, + "limit": map[string]interface{}{ + "type": "integer", + "description": "Maximum number of bytes to read", + }, + "offset": map[string]interface{}{ + "type": "integer", + "description": "Byte offset to start reading from", + }, + }, + "required": []string{"path"}, + } +} + +func (t *ReadFileTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + path, ok := args["path"].(string) + if !ok { + return "", fmt.Errorf("path is required") + } + + limit := int64(0) + if val, ok := args["limit"].(float64); ok { + limit = int64(val) + } + + offset := int64(0) + if val, ok := args["offset"].(float64); ok { + offset = int64(val) + } + + f, err := os.Open(path) + if err != nil { + return "", fmt.Errorf("failed to open file: %w", err) + } + defer f.Close() + + info, err := f.Stat() + if err != nil { + return "", fmt.Errorf("failed to stat file: %w", err) + } + + if offset >= info.Size() { + return "", nil // Offset beyond file size + } + + if _, err := f.Seek(offset, 0); err != nil { + return "", fmt.Errorf("failed to seek: %w", err) + } + + // Default read all if limit is not set or 0 + readLimit := info.Size() - offset + if limit > 0 && limit < readLimit { + readLimit = limit + } + + // Safety cap: don't read insanely large files into memory unless requested + // But tool says "read file", so we respect limit. + // If limit is 0 (unspecified), maybe we should default to a reasonable max? + // The original code used os.ReadFile which reads ALL. 
So I should probably keep that behavior if limit is 0. + // However, if limit is explicitly passed as 0, it might mean "read 0 bytes". But usually in JSON APIs 0 means default or none. + // Let's assume limit > 0 means limit. If limit <= 0, read until EOF. + + var content []byte + if limit > 0 { + content = make([]byte, readLimit) + n, err := io.ReadFull(f, content) + if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF { + return "", fmt.Errorf("failed to read file: %w", err) + } + content = content[:n] + } else { + // Read until EOF + content, err = io.ReadAll(f) + if err != nil { + return "", fmt.Errorf("failed to read file: %w", err) + } + } + + return string(content), nil +} + +type WriteFileTool struct{} + +func (t *WriteFileTool) Name() string { + return "write_file" +} + +func (t *WriteFileTool) Description() string { + return "Write content to a file" +} + +func (t *WriteFileTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{ + "type": "string", + "description": "Path to the file to write", + }, + "content": map[string]interface{}{ + "type": "string", + "description": "Content to write to the file", + }, + }, + "required": []string{"path", "content"}, + } +} + +func (t *WriteFileTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + path, ok := args["path"].(string) + if !ok { + return "", fmt.Errorf("path is required") + } + + content, ok := args["content"].(string) + if !ok { + return "", fmt.Errorf("content is required") + } + + dir := filepath.Dir(path) + if err := os.MkdirAll(dir, 0755); err != nil { + return "", fmt.Errorf("failed to create directory: %w", err) + } + + if err := os.WriteFile(path, []byte(content), 0644); err != nil { + return "", fmt.Errorf("failed to write file: %w", err) + } + + return "File written successfully", nil +} + +type ListDirTool struct{} + +func (t *ListDirTool) 
Name() string { + return "list_dir" +} + +func (t *ListDirTool) Description() string { + return "List files and directories in a path" +} + +func (t *ListDirTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{ + "type": "string", + "description": "Path to list", + }, + "recursive": map[string]interface{}{ + "type": "boolean", + "description": "List recursively", + }, + }, + "required": []string{"path"}, + } +} + +func (t *ListDirTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + path, ok := args["path"].(string) + if !ok { + path = "." + } + + recursive, _ := args["recursive"].(bool) + + var result strings.Builder + + if recursive { + err := filepath.Walk(path, func(p string, info os.FileInfo, err error) error { + if err != nil { + return err + } + relPath, err := filepath.Rel(path, p) + if err != nil { + relPath = p + } + if relPath == "." 
{ + return nil + } + if info.IsDir() { + result.WriteString(fmt.Sprintf("DIR: %s\n", relPath)) + } else { + result.WriteString(fmt.Sprintf("FILE: %s\n", relPath)) + } + return nil + }) + if err != nil { + return "", fmt.Errorf("failed to walk directory: %w", err) + } + } else { + entries, err := os.ReadDir(path) + if err != nil { + return "", fmt.Errorf("failed to read directory: %w", err) + } + + // Sort entries: directories first, then files + sort.Slice(entries, func(i, j int) bool { + if entries[i].IsDir() && !entries[j].IsDir() { + return true + } + if !entries[i].IsDir() && entries[j].IsDir() { + return false + } + return entries[i].Name() < entries[j].Name() + }) + + for _, entry := range entries { + if entry.IsDir() { + result.WriteString(fmt.Sprintf("DIR: %s\n", entry.Name())) + } else { + result.WriteString(fmt.Sprintf("FILE: %s\n", entry.Name())) + } + } + } + + return result.String(), nil +} diff --git a/pkg/tools/memory.go b/pkg/tools/memory.go new file mode 100644 index 0000000..34a136c --- /dev/null +++ b/pkg/tools/memory.go @@ -0,0 +1,229 @@ +package tools + +import ( + "bufio" + "context" + "fmt" + "os" + "path/filepath" + "strings" +) + +type MemorySearchTool struct { + workspace string +} + +func NewMemorySearchTool(workspace string) *MemorySearchTool { + return &MemorySearchTool{ + workspace: workspace, + } +} + +func (t *MemorySearchTool) Name() string { + return "memory_search" +} + +func (t *MemorySearchTool) Description() string { + return "Semantically search MEMORY.md and memory/*.md files for information. Returns relevant snippets (paragraphs) containing the query terms." 
+} + +func (t *MemorySearchTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "query": map[string]interface{}{ + "type": "string", + "description": "Search query keywords (e.g., 'docker deploy project')", + }, + "maxResults": map[string]interface{}{ + "type": "integer", + "description": "Maximum number of results to return", + "default": 5, + }, + }, + "required": []string{"query"}, + } +} + +type searchResult struct { + file string + lineNum int + content string + score int +} + +func (t *MemorySearchTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + query, ok := args["query"].(string) + if !ok || query == "" { + return "", fmt.Errorf("query is required") + } + + maxResults := 5 + if m, ok := args["maxResults"].(float64); ok { + maxResults = int(m) + } + + keywords := strings.Fields(strings.ToLower(query)) + if len(keywords) == 0 { + return "Please provide search keywords.", nil + } + + files := t.getMemoryFiles() + var results []searchResult + + for _, file := range files { + matches, err := t.searchFile(file, keywords) + if err != nil { + continue // skip unreadable files + } + results = append(results, matches...) 
+ } + + // Simple ranking: sort by score (number of keyword matches) desc + // Ideally use a stable sort or more sophisticated scoring + for i := 0; i < len(results); i++ { + for j := i + 1; j < len(results); j++ { + if results[j].score > results[i].score { + results[i], results[j] = results[j], results[i] + } + } + } + + if len(results) > maxResults { + results = results[:maxResults] + } + + if len(results) == 0 { + return fmt.Sprintf("No memory found for query: %s", query), nil + } + + var sb strings.Builder + sb.WriteString(fmt.Sprintf("Found %d memories for '%s':\n\n", len(results), query)) + for _, res := range results { + relPath, _ := filepath.Rel(t.workspace, res.file) + sb.WriteString(fmt.Sprintf("--- Source: %s:%d ---\n%s\n\n", relPath, res.lineNum, res.content)) + } + + return sb.String(), nil +} + +func (t *MemorySearchTool) getMemoryFiles() []string { + var files []string + + // Check main MEMORY.md + mainMem := filepath.Join(t.workspace, "MEMORY.md") + if _, err := os.Stat(mainMem); err == nil { + files = append(files, mainMem) + } + + // Check memory/ directory + memDir := filepath.Join(t.workspace, "memory") + entries, err := os.ReadDir(memDir) + if err == nil { + for _, entry := range entries { + if !entry.IsDir() && strings.HasSuffix(entry.Name(), ".md") { + files = append(files, filepath.Join(memDir, entry.Name())) + } + } + } + return files +} + +// searchFile parses the markdown file into blocks (paragraphs/list items) and searches them +func (t *MemorySearchTool) searchFile(path string, keywords []string) ([]searchResult, error) { + file, err := os.Open(path) + if err != nil { + return nil, err + } + defer file.Close() + + var results []searchResult + scanner := bufio.NewScanner(file) + + var currentBlock strings.Builder + var blockStartLine int = 1 + var currentLineNum int = 0 + var lastHeading string + + processBlock := func() { + content := strings.TrimSpace(currentBlock.String()) + if content != "" { + lowerContent := 
strings.ToLower(content) + score := 0 + // Calculate score: how many keywords are present? + for _, kw := range keywords { + if strings.Contains(lowerContent, kw) { + score++ + } + } + + // Add bonus if heading matches + if lastHeading != "" { + lowerHeading := strings.ToLower(lastHeading) + for _, kw := range keywords { + if strings.Contains(lowerHeading, kw) { + score++ + } + } + // Prepend heading context if not already part of block + if !strings.HasPrefix(content, "#") { + content = fmt.Sprintf("[%s]\n%s", lastHeading, content) + } + } + + // Only keep if at least one keyword matched + if score > 0 { + results = append(results, searchResult{ + file: path, + lineNum: blockStartLine, + content: content, + score: score, + }) + } + } + currentBlock.Reset() + } + + for scanner.Scan() { + currentLineNum++ + line := scanner.Text() + trimmed := strings.TrimSpace(line) + + // Markdown Block Logic: + // 1. Headers start new blocks + // 2. Empty lines separate blocks + // 3. List items start new blocks (optional, but good for logs) + + isHeader := strings.HasPrefix(trimmed, "#") + isEmpty := trimmed == "" + isList := strings.HasPrefix(trimmed, "- ") || strings.HasPrefix(trimmed, "* ") || (len(trimmed) > 3 && trimmed[1] == '.' 
&& trimmed[2] == ' ') + + if isHeader { + processBlock() // Flush previous + lastHeading = strings.TrimLeft(trimmed, "# ") + blockStartLine = currentLineNum + currentBlock.WriteString(line + "\n") + processBlock() // Headers are their own blocks too + continue + } + + if isEmpty { + processBlock() // Flush previous + blockStartLine = currentLineNum + 1 + continue + } + + if isList { + processBlock() // Flush previous (treat list items as atomic for better granularity) + blockStartLine = currentLineNum + } + + if currentBlock.Len() == 0 { + blockStartLine = currentLineNum + } + currentBlock.WriteString(line + "\n") + } + + processBlock() // Flush last block + return results, nil +} diff --git a/pkg/tools/message.go b/pkg/tools/message.go new file mode 100644 index 0000000..e090234 --- /dev/null +++ b/pkg/tools/message.go @@ -0,0 +1,87 @@ +package tools + +import ( + "context" + "fmt" +) + +type SendCallback func(channel, chatID, content string) error + +type MessageTool struct { + sendCallback SendCallback + defaultChannel string + defaultChatID string +} + +func NewMessageTool() *MessageTool { + return &MessageTool{} +} + +func (t *MessageTool) Name() string { + return "message" +} + +func (t *MessageTool) Description() string { + return "Send a message to user on a chat channel. Use this when you want to communicate something." 
+} + +func (t *MessageTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "content": map[string]interface{}{ + "type": "string", + "description": "The message content to send", + }, + "channel": map[string]interface{}{ + "type": "string", + "description": "Optional: target channel (telegram, whatsapp, etc.)", + }, + "chat_id": map[string]interface{}{ + "type": "string", + "description": "Optional: target chat/user ID", + }, + }, + "required": []string{"content"}, + } +} + +func (t *MessageTool) SetContext(channel, chatID string) { + t.defaultChannel = channel + t.defaultChatID = chatID +} + +func (t *MessageTool) SetSendCallback(callback SendCallback) { + t.sendCallback = callback +} + +func (t *MessageTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + content, ok := args["content"].(string) + if !ok { + return "", fmt.Errorf("content is required") + } + + channel, _ := args["channel"].(string) + chatID, _ := args["chat_id"].(string) + + if channel == "" { + channel = t.defaultChannel + } + if chatID == "" { + chatID = t.defaultChatID + } + + if channel == "" || chatID == "" { + return "Error: No target channel/chat specified", nil + } + + if t.sendCallback == nil { + return "Error: Message sending not configured", nil + } + + if err := t.sendCallback(channel, chatID, content); err != nil { + return fmt.Sprintf("Error sending message: %v", err), nil + } + + return fmt.Sprintf("Message sent to %s:%s", channel, chatID), nil +} diff --git a/pkg/tools/registry.go b/pkg/tools/registry.go new file mode 100644 index 0000000..3a14c9e --- /dev/null +++ b/pkg/tools/registry.go @@ -0,0 +1,116 @@ +package tools + +import ( + "context" + "fmt" + "sync" + "time" + + "gitea.kkkk.dev/DBT/clawgo/pkg/logger" +) + +type ToolRegistry struct { + tools map[string]Tool + mu sync.RWMutex +} + +func NewToolRegistry() *ToolRegistry { + return &ToolRegistry{ + tools: 
make(map[string]Tool), + } +} + +func (r *ToolRegistry) Register(tool Tool) { + r.mu.Lock() + defer r.mu.Unlock() + r.tools[tool.Name()] = tool +} + +func (r *ToolRegistry) Get(name string) (Tool, bool) { + r.mu.RLock() + defer r.mu.RUnlock() + tool, ok := r.tools[name] + return tool, ok +} + +func (r *ToolRegistry) Execute(ctx context.Context, name string, args map[string]interface{}) (string, error) { + logger.InfoCF("tool", "Tool execution started", + map[string]interface{}{ + "tool": name, + "args": args, + }) + + tool, ok := r.Get(name) + if !ok { + logger.ErrorCF("tool", "Tool not found", + map[string]interface{}{ + "tool": name, + }) + return "", fmt.Errorf("tool '%s' not found", name) + } + + start := time.Now() + result, err := tool.Execute(ctx, args) + duration := time.Since(start) + + if err != nil { + logger.ErrorCF("tool", "Tool execution failed", + map[string]interface{}{ + "tool": name, + "duration": duration.Milliseconds(), + "error": err.Error(), + }) + } else { + logger.InfoCF("tool", "Tool execution completed", + map[string]interface{}{ + "tool": name, + "duration_ms": duration.Milliseconds(), + "result_length": len(result), + }) + } + + return result, err +} + +func (r *ToolRegistry) GetDefinitions() []map[string]interface{} { + r.mu.RLock() + defer r.mu.RUnlock() + + definitions := make([]map[string]interface{}, 0, len(r.tools)) + for _, tool := range r.tools { + definitions = append(definitions, ToolToSchema(tool)) + } + return definitions +} + +// List returns a list of all registered tool names. +func (r *ToolRegistry) List() []string { + r.mu.RLock() + defer r.mu.RUnlock() + + names := make([]string, 0, len(r.tools)) + for name := range r.tools { + names = append(names, name) + } + return names +} + +// Count returns the number of registered tools. +func (r *ToolRegistry) Count() int { + r.mu.RLock() + defer r.mu.RUnlock() + return len(r.tools) +} + +// GetSummaries returns human-readable summaries of all registered tools. 
+// Returns a slice of "name - description" strings. +func (r *ToolRegistry) GetSummaries() []string { + r.mu.RLock() + defer r.mu.RUnlock() + + summaries := make([]string, 0, len(r.tools)) + for _, tool := range r.tools { + summaries = append(summaries, fmt.Sprintf("- `%s` - %s", tool.Name(), tool.Description())) + } + return summaries +} diff --git a/pkg/tools/remind.go b/pkg/tools/remind.go new file mode 100644 index 0000000..0fff619 --- /dev/null +++ b/pkg/tools/remind.go @@ -0,0 +1,122 @@ +package tools + +import ( + "context" + "fmt" + "time" + + "gitea.kkkk.dev/DBT/clawgo/pkg/cron" +) + +type RemindTool struct { + cs *cron.CronService +} + +func NewRemindTool(cs *cron.CronService) *RemindTool { + return &RemindTool{cs: cs} +} + +func (t *RemindTool) Name() string { + return "remind" +} + +func (t *RemindTool) Description() string { + return "Set a reminder for a future time" +} + +func (t *RemindTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "message": map[string]interface{}{ + "type": "string", + "description": "The reminder message", + }, + "time_expr": map[string]interface{}{ + "type": "string", + "description": "When to remind (e.g., '10m', '1h', '2026-02-12 10:00')", + }, + }, + "required": []string{"message", "time_expr"}, + } +} + +func (t *RemindTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + if t.cs == nil { + return "", fmt.Errorf("cron service not available") + } + + message, ok := args["message"].(string) + if !ok { + return "", fmt.Errorf("message is required") + } + + timeExpr, ok := args["time_expr"].(string) + if !ok { + return "", fmt.Errorf("time_expr is required") + } + + // Try duration first (e.g., "10m", "1h30m") + if d, err := time.ParseDuration(timeExpr); err == nil { + at := time.Now().Add(d).UnixMilli() + schedule := cron.CronSchedule{ + Kind: "at", + AtMS: &at, + } + job, err := t.cs.AddJob("Reminder", 
schedule, message, true, "", "") // deliver=true, channel="" means default + if err != nil { + return "", fmt.Errorf("failed to schedule reminder: %w", err) + } + return fmt.Sprintf("Reminder set for %s (in %s). Job ID: %s", time.UnixMilli(at).Format(time.RFC1123), d, job.ID), nil + } + + // Try absolute date/time formats + formats := []string{ + "2006-01-02 15:04", + "2006-01-02 15:04:05", + "15:04", + "15:04:05", + } + + var parsedTime time.Time + var parseErr error + parsed := false + + for _, layout := range formats { + if t, err := time.ParseInLocation(layout, timeExpr, time.Local); err == nil { + parsedTime = t + parsed = true + // If format was time-only, use today or tomorrow + if layout == "15:04" || layout == "15:04:05" { + now := time.Now() + // Combine today's date with parsed time + combined := time.Date(now.Year(), now.Month(), now.Day(), t.Hour(), t.Minute(), t.Second(), 0, time.Local) + if combined.Before(now) { + // If time passed today, assume tomorrow + combined = combined.Add(24 * time.Hour) + } + parsedTime = combined + } + break + } else { + parseErr = err + } + } + + if !parsed { + return "", fmt.Errorf("could not parse time expression '%s': %v", timeExpr, parseErr) + } + + at := parsedTime.UnixMilli() + schedule := cron.CronSchedule{ + Kind: "at", + AtMS: &at, + } + + job, err := t.cs.AddJob("Reminder", schedule, message, true, "", "") + if err != nil { + return "", fmt.Errorf("failed to schedule reminder: %w", err) + } + + return fmt.Sprintf("Reminder set for %s. 
Job ID: %s", parsedTime.Format(time.RFC1123), job.ID), nil +} diff --git a/pkg/tools/shell.go b/pkg/tools/shell.go new file mode 100644 index 0000000..d8aea40 --- /dev/null +++ b/pkg/tools/shell.go @@ -0,0 +1,202 @@ +package tools + +import ( + "bytes" + "context" + "fmt" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" + "time" +) + +type ExecTool struct { + workingDir string + timeout time.Duration + denyPatterns []*regexp.Regexp + allowPatterns []*regexp.Regexp + restrictToWorkspace bool +} + +func NewExecTool(workingDir string) *ExecTool { + denyPatterns := []*regexp.Regexp{ + regexp.MustCompile(`\brm\s+-[rf]{1,2}\b`), + regexp.MustCompile(`\bdel\s+/[fq]\b`), + regexp.MustCompile(`\brmdir\s+/s\b`), + regexp.MustCompile(`\b(format|mkfs|diskpart)\b\s`), // Match disk wiping commands (must be followed by space/args) + regexp.MustCompile(`\bdd\s+if=`), + regexp.MustCompile(`>\s*/dev/sd[a-z]\b`), // Block writes to disk devices (but allow /dev/null) + regexp.MustCompile(`\b(shutdown|reboot|poweroff)\b`), + regexp.MustCompile(`:\(\)\s*\{.*\};\s*:`), + } + + return &ExecTool{ + workingDir: workingDir, + timeout: 60 * time.Second, + denyPatterns: denyPatterns, + allowPatterns: nil, + restrictToWorkspace: false, + } +} + +func (t *ExecTool) Name() string { + return "exec" +} + +func (t *ExecTool) Description() string { + return "Execute a shell command and return its output. Use with caution." 
+} + +func (t *ExecTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "command": map[string]interface{}{ + "type": "string", + "description": "The shell command to execute", + }, + "working_dir": map[string]interface{}{ + "type": "string", + "description": "Optional working directory for the command", + }, + }, + "required": []string{"command"}, + } +} + +func (t *ExecTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + command, ok := args["command"].(string) + if !ok { + return "", fmt.Errorf("command is required") + } + + cwd := t.workingDir + if wd, ok := args["working_dir"].(string); ok && wd != "" { + cwd = wd + } + + if cwd == "" { + wd, err := os.Getwd() + if err == nil { + cwd = wd + } + } + + if guardError := t.guardCommand(command, cwd); guardError != "" { + return fmt.Sprintf("Error: %s", guardError), nil + } + + cmdCtx, cancel := context.WithTimeout(ctx, t.timeout) + defer cancel() + + cmd := exec.CommandContext(cmdCtx, "sh", "-c", command) + if cwd != "" { + cmd.Dir = cwd + } + + var stdout, stderr bytes.Buffer + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + err := cmd.Run() + output := stdout.String() + if stderr.Len() > 0 { + output += "\nSTDERR:\n" + stderr.String() + } + + if err != nil { + if cmdCtx.Err() == context.DeadlineExceeded { + return fmt.Sprintf("Error: Command timed out after %v", t.timeout), nil + } + output += fmt.Sprintf("\nExit code: %v", err) + } + + if output == "" { + output = "(no output)" + } + + maxLen := 10000 + if len(output) > maxLen { + output = output[:maxLen] + fmt.Sprintf("\n... 
(truncated, %d more chars)", len(output)-maxLen) + } + + return output, nil +} + +func (t *ExecTool) guardCommand(command, cwd string) string { + cmd := strings.TrimSpace(command) + lower := strings.ToLower(cmd) + + for _, pattern := range t.denyPatterns { + if pattern.MatchString(lower) { + return "Command blocked by safety guard (dangerous pattern detected)" + } + } + + if len(t.allowPatterns) > 0 { + allowed := false + for _, pattern := range t.allowPatterns { + if pattern.MatchString(lower) { + allowed = true + break + } + } + if !allowed { + return "Command blocked by safety guard (not in allowlist)" + } + } + + if t.restrictToWorkspace { + if strings.Contains(cmd, "..\\") || strings.Contains(cmd, "../") { + return "Command blocked by safety guard (path traversal detected)" + } + + cwdPath, err := filepath.Abs(cwd) + if err != nil { + return "" + } + + pathPattern := regexp.MustCompile(`[A-Za-z]:\\[^\\\"']+|/[^\s\"']+`) + matches := pathPattern.FindAllString(cmd, -1) + + for _, raw := range matches { + p, err := filepath.Abs(raw) + if err != nil { + continue + } + + rel, err := filepath.Rel(cwdPath, p) + if err != nil { + continue + } + + if strings.HasPrefix(rel, "..") { + return "Command blocked by safety guard (path outside working dir)" + } + } + } + + return "" +} + +func (t *ExecTool) SetTimeout(timeout time.Duration) { + t.timeout = timeout +} + +func (t *ExecTool) SetRestrictToWorkspace(restrict bool) { + t.restrictToWorkspace = restrict +} + +func (t *ExecTool) SetAllowPatterns(patterns []string) error { + t.allowPatterns = make([]*regexp.Regexp, 0, len(patterns)) + for _, p := range patterns { + re, err := regexp.Compile(p) + if err != nil { + return fmt.Errorf("invalid allow pattern %q: %w", p, err) + } + t.allowPatterns = append(t.allowPatterns, re) + } + return nil +} diff --git a/pkg/tools/spawn.go b/pkg/tools/spawn.go new file mode 100644 index 0000000..1bd7ac4 --- /dev/null +++ b/pkg/tools/spawn.go @@ -0,0 +1,70 @@ +package tools + +import ( + 
"context" + "fmt" +) + +type SpawnTool struct { + manager *SubagentManager + originChannel string + originChatID string +} + +func NewSpawnTool(manager *SubagentManager) *SpawnTool { + return &SpawnTool{ + manager: manager, + originChannel: "cli", + originChatID: "direct", + } +} + +func (t *SpawnTool) Name() string { + return "spawn" +} + +func (t *SpawnTool) Description() string { + return "Spawn a subagent to handle a task in the background. Use this for complex or time-consuming tasks that can run independently. The subagent will complete the task and report back when done." +} + +func (t *SpawnTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "task": map[string]interface{}{ + "type": "string", + "description": "The task for subagent to complete", + }, + "label": map[string]interface{}{ + "type": "string", + "description": "Optional short label for the task (for display)", + }, + }, + "required": []string{"task"}, + } +} + +func (t *SpawnTool) SetContext(channel, chatID string) { + t.originChannel = channel + t.originChatID = chatID +} + +func (t *SpawnTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + task, ok := args["task"].(string) + if !ok { + return "", fmt.Errorf("task is required") + } + + label, _ := args["label"].(string) + + if t.manager == nil { + return "Error: Subagent manager not configured", nil + } + + result, err := t.manager.Spawn(ctx, task, label, t.originChannel, t.originChatID) + if err != nil { + return "", fmt.Errorf("failed to spawn subagent: %w", err) + } + + return result, nil +} diff --git a/pkg/tools/subagent.go b/pkg/tools/subagent.go new file mode 100644 index 0000000..6146340 --- /dev/null +++ b/pkg/tools/subagent.go @@ -0,0 +1,128 @@ +package tools + +import ( + "context" + "fmt" + "sync" + "time" + + "gitea.kkkk.dev/DBT/clawgo/pkg/bus" + "gitea.kkkk.dev/DBT/clawgo/pkg/providers" +) + +type SubagentTask 
struct { + ID string + Task string + Label string + OriginChannel string + OriginChatID string + Status string + Result string + Created int64 +} + +type SubagentManager struct { + tasks map[string]*SubagentTask + mu sync.RWMutex + provider providers.LLMProvider + bus *bus.MessageBus + workspace string + nextID int +} + +func NewSubagentManager(provider providers.LLMProvider, workspace string, bus *bus.MessageBus) *SubagentManager { + return &SubagentManager{ + tasks: make(map[string]*SubagentTask), + provider: provider, + bus: bus, + workspace: workspace, + nextID: 1, + } +} + +func (sm *SubagentManager) Spawn(ctx context.Context, task, label, originChannel, originChatID string) (string, error) { + sm.mu.Lock() + defer sm.mu.Unlock() + + taskID := fmt.Sprintf("subagent-%d", sm.nextID) + sm.nextID++ + + subagentTask := &SubagentTask{ + ID: taskID, + Task: task, + Label: label, + OriginChannel: originChannel, + OriginChatID: originChatID, + Status: "running", + Created: time.Now().UnixMilli(), + } + sm.tasks[taskID] = subagentTask + + go sm.runTask(ctx, subagentTask) + + if label != "" { + return fmt.Sprintf("Spawned subagent '%s' for task: %s", label, task), nil + } + return fmt.Sprintf("Spawned subagent for task: %s", task), nil +} + +func (sm *SubagentManager) runTask(ctx context.Context, task *SubagentTask) { + task.Status = "running" + task.Created = time.Now().UnixMilli() + + messages := []providers.Message{ + { + Role: "system", + Content: "You are a subagent. 
Complete the given task independently and report the result.", + }, + { + Role: "user", + Content: task.Task, + }, + } + + response, err := sm.provider.Chat(ctx, messages, nil, sm.provider.GetDefaultModel(), map[string]interface{}{ + "max_tokens": 4096, + }) + + sm.mu.Lock() + defer sm.mu.Unlock() + + if err != nil { + task.Status = "failed" + task.Result = fmt.Sprintf("Error: %v", err) + } else { + task.Status = "completed" + task.Result = response.Content + } + + // Send announce message back to main agent + if sm.bus != nil { + announceContent := fmt.Sprintf("Task '%s' completed.\n\nResult:\n%s", task.Label, task.Result) + sm.bus.PublishInbound(bus.InboundMessage{ + Channel: "system", + SenderID: fmt.Sprintf("subagent:%s", task.ID), + // Format: "original_channel:original_chat_id" for routing back + ChatID: fmt.Sprintf("%s:%s", task.OriginChannel, task.OriginChatID), + Content: announceContent, + }) + } +} + +func (sm *SubagentManager) GetTask(taskID string) (*SubagentTask, bool) { + sm.mu.RLock() + defer sm.mu.RUnlock() + task, ok := sm.tasks[taskID] + return task, ok +} + +func (sm *SubagentManager) ListTasks() []*SubagentTask { + sm.mu.RLock() + defer sm.mu.RUnlock() + + tasks := make([]*SubagentTask, 0, len(sm.tasks)) + for _, task := range sm.tasks { + tasks = append(tasks, task) + } + return tasks +} diff --git a/pkg/tools/system.go b/pkg/tools/system.go new file mode 100644 index 0000000..5b84678 --- /dev/null +++ b/pkg/tools/system.go @@ -0,0 +1,85 @@ +package tools + +import ( + "context" + "fmt" + "os" + "runtime" + "strings" + "syscall" +) + +type SystemInfoTool struct {} + +func NewSystemInfoTool() *SystemInfoTool { + return &SystemInfoTool{} +} + +func (t *SystemInfoTool) Name() string { + return "system_info" +} + +func (t *SystemInfoTool) Description() string { + return "Get current system status (CPU, RAM, Disk)." 
+} + +func (t *SystemInfoTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{}, + } +} + +func (t *SystemInfoTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + info := fmt.Sprintf("System Info:\n") + info += fmt.Sprintf("OS: %s %s\n", runtime.GOOS, runtime.GOARCH) + + // Load Average + if data, err := os.ReadFile("/proc/loadavg"); err == nil { + info += fmt.Sprintf("Load Avg: %s", string(data)) + } else { + info += "Load Avg: N/A\n" + } + + // RAM from /proc/meminfo + if data, err := os.ReadFile("/proc/meminfo"); err == nil { + lines := strings.Split(string(data), "\n") + var total, free, available uint64 + for _, line := range lines { + if strings.HasPrefix(line, "MemTotal:") { + fmt.Sscanf(line, "MemTotal: %d kB", &total) + } else if strings.HasPrefix(line, "MemFree:") { + fmt.Sscanf(line, "MemFree: %d kB", &free) + } else if strings.HasPrefix(line, "MemAvailable:") { + fmt.Sscanf(line, "MemAvailable: %d kB", &available) + } + } + if total > 0 { + // fallback if Available not present (older kernels) + if available == 0 { + available = free // very rough approximation + } + used := total - available + info += fmt.Sprintf("RAM: Used %.2f GB / Total %.2f GB (%.2f%%)\n", + float64(used)/1024/1024, float64(total)/1024/1024, float64(used)/float64(total)*100) + } + } else { + info += "RAM: N/A\n" + } + + // Disk usage for / + var stat syscall.Statfs_t + if err := syscall.Statfs("/", &stat); err == nil { + // Cast to uint64 to avoid overflow/type mismatch + bsize := uint64(stat.Bsize) + total := stat.Blocks * bsize + free := stat.Bfree * bsize + used := total - free + info += fmt.Sprintf("Disk (/): Used %.2f GB / Total %.2f GB (%.2f%%)\n", + float64(used)/1024/1024/1024, float64(total)/1024/1024/1024, float64(used)/float64(total)*100) + } else { + info += "Disk: N/A\n" + } + + return info, nil +} diff --git a/pkg/tools/types.go b/pkg/tools/types.go new 
file mode 100644 index 0000000..f8205b8 --- /dev/null +++ b/pkg/tools/types.go @@ -0,0 +1,52 @@ +package tools + +import "context" + +type Message struct { + Role string `json:"role"` + Content string `json:"content"` + ToolCalls []ToolCall `json:"tool_calls,omitempty"` + ToolCallID string `json:"tool_call_id,omitempty"` +} + +type ToolCall struct { + ID string `json:"id"` + Type string `json:"type"` + Function *FunctionCall `json:"function,omitempty"` + Name string `json:"name,omitempty"` + Arguments map[string]interface{} `json:"arguments,omitempty"` +} + +type FunctionCall struct { + Name string `json:"name"` + Arguments string `json:"arguments"` +} + +type LLMResponse struct { + Content string `json:"content"` + ToolCalls []ToolCall `json:"tool_calls,omitempty"` + FinishReason string `json:"finish_reason"` + Usage *UsageInfo `json:"usage,omitempty"` +} + +type UsageInfo struct { + PromptTokens int `json:"prompt_tokens"` + CompletionTokens int `json:"completion_tokens"` + TotalTokens int `json:"total_tokens"` +} + +type LLMProvider interface { + Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error) + GetDefaultModel() string +} + +type ToolDefinition struct { + Type string `json:"type"` + Function ToolFunctionDefinition `json:"function"` +} + +type ToolFunctionDefinition struct { + Name string `json:"name"` + Description string `json:"description"` + Parameters map[string]interface{} `json:"parameters"` +} diff --git a/pkg/tools/web.go b/pkg/tools/web.go new file mode 100644 index 0000000..218815e --- /dev/null +++ b/pkg/tools/web.go @@ -0,0 +1,346 @@ +package tools + +import ( + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "regexp" + "strings" + "time" +) + +const ( + userAgent = "Mozilla/5.0 (compatible; clawgo/1.0)" +) + +type WebSearchTool struct { + apiKey string + maxResults int +} + +func NewWebSearchTool(apiKey string, maxResults int) 
*WebSearchTool { + if maxResults <= 0 || maxResults > 10 { + maxResults = 5 + } + return &WebSearchTool{ + apiKey: apiKey, + maxResults: maxResults, + } +} + +func (t *WebSearchTool) Name() string { + return "web_search" +} + +func (t *WebSearchTool) Description() string { + return "Search the web for current information. Returns titles, URLs, and snippets from search results." +} + +func (t *WebSearchTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "query": map[string]interface{}{ + "type": "string", + "description": "Search query", + }, + "count": map[string]interface{}{ + "type": "integer", + "description": "Number of results (1-10)", + "minimum": 1.0, + "maximum": 10.0, + }, + }, + "required": []string{"query"}, + } +} + +func (t *WebSearchTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + if t.apiKey == "" { + return "Error: BRAVE_API_KEY not configured", nil + } + + query, ok := args["query"].(string) + if !ok { + return "", fmt.Errorf("query is required") + } + + count := t.maxResults + if c, ok := args["count"].(float64); ok { + if int(c) > 0 && int(c) <= 10 { + count = int(c) + } + } + + searchURL := fmt.Sprintf("https://api.search.brave.com/res/v1/web/search?q=%s&count=%d", + url.QueryEscape(query), count) + + req, err := http.NewRequestWithContext(ctx, "GET", searchURL, nil) + if err != nil { + return "", fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("Accept", "application/json") + req.Header.Set("X-Subscription-Token", t.apiKey) + + client := &http.Client{Timeout: 10 * time.Second} + resp, err := client.Do(req) + if err != nil { + return "", fmt.Errorf("request failed: %w", err) + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return "", fmt.Errorf("failed to read response: %w", err) + } + + var searchResp struct { + Web struct { + Results []struct { + Title string 
`json:"title"` + URL string `json:"url"` + Description string `json:"description"` + } `json:"results"` + } `json:"web"` + } + + if err := json.Unmarshal(body, &searchResp); err != nil { + return "", fmt.Errorf("failed to parse response: %w", err) + } + + results := searchResp.Web.Results + if len(results) == 0 { + return fmt.Sprintf("No results for: %s", query), nil + } + + var lines []string + lines = append(lines, fmt.Sprintf("Results for: %s", query)) + for i, item := range results { + if i >= count { + break + } + lines = append(lines, fmt.Sprintf("%d. %s\n %s", i+1, item.Title, item.URL)) + if item.Description != "" { + lines = append(lines, fmt.Sprintf(" %s", item.Description)) + } + } + + return strings.Join(lines, "\n"), nil +} + +type WebFetchTool struct { + maxChars int +} + +func NewWebFetchTool(maxChars int) *WebFetchTool { + if maxChars <= 0 { + maxChars = 50000 + } + return &WebFetchTool{ + maxChars: maxChars, + } +} + +func (t *WebFetchTool) Name() string { + return "web_fetch" +} + +func (t *WebFetchTool) Description() string { + return "Fetch a URL and extract readable content (HTML to Markdown). Preserves structure (headers, links, code blocks) for better reading." 
+} + +func (t *WebFetchTool) Parameters() map[string]interface{} { + return map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "url": map[string]interface{}{ + "type": "string", + "description": "URL to fetch", + }, + "maxChars": map[string]interface{}{ + "type": "integer", + "description": "Maximum characters to extract", + "minimum": 100.0, + }, + }, + "required": []string{"url"}, + } +} + +func (t *WebFetchTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) { + urlStr, ok := args["url"].(string) + if !ok { + return "", fmt.Errorf("url is required") + } + + parsedURL, err := url.Parse(urlStr) + if err != nil { + return "", fmt.Errorf("invalid URL: %w", err) + } + + if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" { + return "", fmt.Errorf("only http/https URLs are allowed") + } + + if parsedURL.Host == "" { + return "", fmt.Errorf("missing domain in URL") + } + + maxChars := t.maxChars + if mc, ok := args["maxChars"].(float64); ok { + if int(mc) > 100 { + maxChars = int(mc) + } + } + + req, err := http.NewRequestWithContext(ctx, "GET", urlStr, nil) + if err != nil { + return "", fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("User-Agent", userAgent) + + client := &http.Client{ + Timeout: 60 * time.Second, + Transport: &http.Transport{ + MaxIdleConns: 10, + IdleConnTimeout: 30 * time.Second, + DisableCompression: false, + TLSHandshakeTimeout: 15 * time.Second, + }, + CheckRedirect: func(req *http.Request, via []*http.Request) error { + if len(via) >= 5 { + return fmt.Errorf("stopped after 5 redirects") + } + return nil + }, + } + + resp, err := client.Do(req) + if err != nil { + return "", fmt.Errorf("request failed: %w", err) + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return "", fmt.Errorf("failed to read response: %w", err) + } + + contentType := resp.Header.Get("Content-Type") + + var text, extractor string + + if 
strings.Contains(contentType, "application/json") { + var jsonData interface{} + if err := json.Unmarshal(body, &jsonData); err == nil { + formatted, _ := json.MarshalIndent(jsonData, "", " ") + text = string(formatted) + extractor = "json" + } else { + text = string(body) + extractor = "raw" + } + } else if strings.Contains(contentType, "text/html") || len(body) > 0 && + (strings.HasPrefix(string(body), " maxChars + if truncated { + text = text[:maxChars] + } + + result := map[string]interface{}{ + "url": urlStr, + "status": resp.StatusCode, + "extractor": extractor, + "truncated": truncated, + "length": len(text), + "text": text, + } + + resultJSON, _ := json.MarshalIndent(result, "", " ") + return string(resultJSON), nil +} + +// extractMarkdown converts HTML to simplified Markdown using Regex. +// It's not perfect but much better than stripping everything. +func (t *WebFetchTool) extractMarkdown(html string) string { + // 1. Remove Scripts and Styles + re := regexp.MustCompile(`(?i)`) + html = re.ReplaceAllLiteralString(html, "") + re = regexp.MustCompile(`(?i)`) + html = re.ReplaceAllLiteralString(html, "") + re = regexp.MustCompile(`(?i)`) + html = re.ReplaceAllLiteralString(html, "") + + // 2. Pre-process block elements to ensure newlines + // Replace ,

, etc. with newlines + re = regexp.MustCompile(`(?i)`) + html = re.ReplaceAllString(html, "\n$0") + + // 3. Convert Headers + re = regexp.MustCompile(`(?i)]*>(.*?)`) + html = re.ReplaceAllString(html, "\n# $1\n") + re = regexp.MustCompile(`(?i)]*>(.*?)`) + html = re.ReplaceAllString(html, "\n## $1\n") + re = regexp.MustCompile(`(?i)]*>(.*?)`) + html = re.ReplaceAllString(html, "\n### $1\n") + re = regexp.MustCompile(`(?i)]*>(.*?)<.*?>`) + html = re.ReplaceAllString(html, "\n#### $1\n") + + // 4. Convert Links: text -> [text](url) + re = regexp.MustCompile(`(?i)]+href="([^"]+)"[^>]*>(.*?)`) + html = re.ReplaceAllString(html, "[$2]($1)") + + // 5. Convert Images: text -> ![text](url) + re = regexp.MustCompile(`(?i)]+src="([^"]+)"[^>]*alt="([^"]*)"[^>]*>`) + html = re.ReplaceAllString(html, "![$2]($1)") + + // 6. Convert Bold/Italic + re = regexp.MustCompile(`(?i)<(b|strong)>(.*?)`) + html = re.ReplaceAllString(html, "**$2**") + re = regexp.MustCompile(`(?i)<(i|em)>(.*?)`) + html = re.ReplaceAllString(html, "*$2*") + + // 7. Convert Code Blocks + re = regexp.MustCompile(`(?i)]*>]*>([\s\S]*?)`) + html = re.ReplaceAllString(html, "\n```\n$1\n```\n") + re = regexp.MustCompile(`(?i)]*>(.*?)`) + html = re.ReplaceAllString(html, "`$1`") + + // 8. Convert Lists + re = regexp.MustCompile(`(?i)]*>(.*?)`) + html = re.ReplaceAllString(html, "- $1\n") + + // 9. Strip remaining tags + re = regexp.MustCompile(`<[^>]+>`) + html = re.ReplaceAllLiteralString(html, "") + + // 10. Decode HTML Entities (Basic ones) + html = strings.ReplaceAll(html, " ", " ") + html = strings.ReplaceAll(html, "&", "&") + html = strings.ReplaceAll(html, "<", "<") + html = strings.ReplaceAll(html, ">", ">") + html = strings.ReplaceAll(html, """, "\"") + html = strings.ReplaceAll(html, "'", "'") + + // 11. 
Cleanup Whitespace + // Collapse multiple spaces + re = regexp.MustCompile(`[ \t]+`) + html = re.ReplaceAllLiteralString(html, " ") + // Collapse multiple newlines (max 2) + re = regexp.MustCompile(`\n{3,}`) + html = re.ReplaceAllLiteralString(html, "\n\n") + + return strings.TrimSpace(html) +} diff --git a/pkg/utils/utils.go b/pkg/utils/utils.go new file mode 100644 index 0000000..6602bb2 --- /dev/null +++ b/pkg/utils/utils.go @@ -0,0 +1,26 @@ +package utils + +import ( + "encoding/json" + "fmt" +) + +// PrettyPrint prints a data structure as a pretty JSON string. +func PrettyPrint(v interface{}) { + b, err := json.MarshalIndent(v, "", " ") + if err != nil { + fmt.Println("error:", err) + } + fmt.Println(string(b)) +} + +// Truncate returns a truncated version of s with at most maxLen characters. +func Truncate(s string, maxLen int) string { + if len(s) <= maxLen { + return s + } + if maxLen <= 3 { + return s[:maxLen] + } + return s[:maxLen-3] + "..." +} diff --git a/pkg/voice/transcriber.go b/pkg/voice/transcriber.go new file mode 100644 index 0000000..e6142eb --- /dev/null +++ b/pkg/voice/transcriber.go @@ -0,0 +1,165 @@ +package voice + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "mime/multipart" + "net/http" + "os" + "path/filepath" + "time" + + "gitea.kkkk.dev/DBT/clawgo/pkg/logger" +) + +type GroqTranscriber struct { + apiKey string + apiBase string + httpClient *http.Client +} + +type TranscriptionResponse struct { + Text string `json:"text"` + Language string `json:"language,omitempty"` + Duration float64 `json:"duration,omitempty"` +} + +func NewGroqTranscriber(apiKey string) *GroqTranscriber { + logger.DebugCF("voice", "Creating Groq transcriber", map[string]interface{}{"has_api_key": apiKey != ""}) + + apiBase := "https://api.groq.com/openai/v1" + return &GroqTranscriber{ + apiKey: apiKey, + apiBase: apiBase, + httpClient: &http.Client{ + Timeout: 60 * time.Second, + }, + } +} + +func (t *GroqTranscriber) Transcribe(ctx 
context.Context, audioFilePath string) (*TranscriptionResponse, error) { + logger.InfoCF("voice", "Starting transcription", map[string]interface{}{"audio_file": audioFilePath}) + + audioFile, err := os.Open(audioFilePath) + if err != nil { + logger.ErrorCF("voice", "Failed to open audio file", map[string]interface{}{"path": audioFilePath, "error": err}) + return nil, fmt.Errorf("failed to open audio file: %w", err) + } + defer audioFile.Close() + + fileInfo, err := audioFile.Stat() + if err != nil { + logger.ErrorCF("voice", "Failed to get file info", map[string]interface{}{"path": audioFilePath, "error": err}) + return nil, fmt.Errorf("failed to get file info: %w", err) + } + + logger.DebugCF("voice", "Audio file details", map[string]interface{}{ + "size_bytes": fileInfo.Size(), + "file_name": filepath.Base(audioFilePath), + }) + + var requestBody bytes.Buffer + writer := multipart.NewWriter(&requestBody) + + part, err := writer.CreateFormFile("file", filepath.Base(audioFilePath)) + if err != nil { + logger.ErrorCF("voice", "Failed to create form file", map[string]interface{}{"error": err}) + return nil, fmt.Errorf("failed to create form file: %w", err) + } + + copied, err := io.Copy(part, audioFile) + if err != nil { + logger.ErrorCF("voice", "Failed to copy file content", map[string]interface{}{"error": err}) + return nil, fmt.Errorf("failed to copy file content: %w", err) + } + + logger.DebugCF("voice", "File copied to request", map[string]interface{}{"bytes_copied": copied}) + + if err := writer.WriteField("model", "whisper-large-v3"); err != nil { + logger.ErrorCF("voice", "Failed to write model field", map[string]interface{}{"error": err}) + return nil, fmt.Errorf("failed to write model field: %w", err) + } + + if err := writer.WriteField("response_format", "json"); err != nil { + logger.ErrorCF("voice", "Failed to write response_format field", map[string]interface{}{"error": err}) + return nil, fmt.Errorf("failed to write response_format field: %w", err) + 
} + + if err := writer.Close(); err != nil { + logger.ErrorCF("voice", "Failed to close multipart writer", map[string]interface{}{"error": err}) + return nil, fmt.Errorf("failed to close multipart writer: %w", err) + } + + url := t.apiBase + "/audio/transcriptions" + req, err := http.NewRequestWithContext(ctx, "POST", url, &requestBody) + if err != nil { + logger.ErrorCF("voice", "Failed to create request", map[string]interface{}{"error": err}) + return nil, fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("Content-Type", writer.FormDataContentType()) + req.Header.Set("Authorization", "Bearer "+t.apiKey) + + logger.DebugCF("voice", "Sending transcription request to Groq API", map[string]interface{}{ + "url": url, + "request_size_bytes": requestBody.Len(), + "file_size_bytes": fileInfo.Size(), + }) + + resp, err := t.httpClient.Do(req) + if err != nil { + logger.ErrorCF("voice", "Failed to send request", map[string]interface{}{"error": err}) + return nil, fmt.Errorf("failed to send request: %w", err) + } + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + if err != nil { + logger.ErrorCF("voice", "Failed to read response", map[string]interface{}{"error": err}) + return nil, fmt.Errorf("failed to read response: %w", err) + } + + if resp.StatusCode != http.StatusOK { + logger.ErrorCF("voice", "API error", map[string]interface{}{ + "status_code": resp.StatusCode, + "response": string(body), + }) + return nil, fmt.Errorf("API error (status %d): %s", resp.StatusCode, string(body)) + } + + logger.DebugCF("voice", "Received response from Groq API", map[string]interface{}{ + "status_code": resp.StatusCode, + "response_size_bytes": len(body), + }) + + var result TranscriptionResponse + if err := json.Unmarshal(body, &result); err != nil { + logger.ErrorCF("voice", "Failed to unmarshal response", map[string]interface{}{"error": err}) + return nil, fmt.Errorf("failed to unmarshal response: %w", err) + } + + logger.InfoCF("voice", 
"Transcription completed successfully", map[string]interface{}{ + "text_length": len(result.Text), + "language": result.Language, + "duration_seconds": result.Duration, + "transcription_preview": truncateText(result.Text, 50), + }) + + return &result, nil +} + +func (t *GroqTranscriber) IsAvailable() bool { + available := t.apiKey != "" + logger.DebugCF("voice", "Checking transcriber availability", map[string]interface{}{"available": available}) + return available +} + +func truncateText(text string, maxLen int) string { + if len(text) <= maxLen { + return text + } + return text[:maxLen] + "..." +} diff --git a/skills/clawhub/SKILL.md b/skills/clawhub/SKILL.md new file mode 100644 index 0000000..f44c82b --- /dev/null +++ b/skills/clawhub/SKILL.md @@ -0,0 +1,77 @@ +--- +name: clawhub +description: Use the ClawHub CLI to search, install, update, and publish agent skills from clawhub.com. Use when you need to fetch new skills on the fly, sync installed skills to latest or a specific version, or publish new/updated skill folders with the npm-installed clawhub CLI. 
+metadata: + { + "openclaw": + { + "requires": { "bins": ["clawhub"] }, + "install": + [ + { + "id": "node", + "kind": "node", + "package": "clawhub", + "bins": ["clawhub"], + "label": "Install ClawHub CLI (npm)", + }, + ], + }, + } +--- + +# ClawHub CLI + +Install + +```bash +npm i -g clawhub +``` + +Auth (publish) + +```bash +clawhub login +clawhub whoami +``` + +Search + +```bash +clawhub search "postgres backups" +``` + +Install + +```bash +clawhub install my-skill +clawhub install my-skill --version 1.2.3 +``` + +Update (hash-based match + upgrade) + +```bash +clawhub update my-skill +clawhub update my-skill --version 1.2.3 +clawhub update --all +clawhub update my-skill --force +clawhub update --all --no-input --force +``` + +List + +```bash +clawhub list +``` + +Publish + +```bash +clawhub publish ./my-skill --slug my-skill --name "My Skill" --version 1.2.0 --changelog "Fixes + docs" +``` + +Notes + +- Default registry: https://clawhub.com (override with CLAWHUB_REGISTRY or --registry) +- Default workdir: cwd (falls back to OpenClaw workspace); install dir: ./skills (override with --workdir / --dir / CLAWHUB_WORKDIR) +- Update command hashes local files, resolves matching version, and upgrades to latest unless --version is set diff --git a/skills/coding-agent/SKILL.md b/skills/coding-agent/SKILL.md new file mode 100644 index 0000000..3acac32 --- /dev/null +++ b/skills/coding-agent/SKILL.md @@ -0,0 +1,274 @@ +--- +name: coding-agent +description: Run Codex CLI, Claude Code, OpenCode, or Pi Coding Agent via background process for programmatic control. +metadata: {"clawdbot":{"emoji":"🧩","requires":{"anyBins":["claude","codex","opencode","pi"]}}} +--- + +# Coding Agent (background-first) + +Use **bash background mode** for non-interactive coding work. For interactive coding sessions, use the **tmux** skill (always, except very simple one-shot prompts). 
+ +## The Pattern: workdir + background + +```bash +# Create temp space for chats/scratch work +SCRATCH=$(mktemp -d) + +# Start agent in target directory ("little box" - only sees relevant files) +bash workdir:$SCRATCH background:true command:"" +# Or for project work: +bash workdir:~/project/folder background:true command:"" +# Returns sessionId for tracking + +# Monitor progress +process action:log sessionId:XXX + +# Check if done +process action:poll sessionId:XXX + +# Send input (if agent asks a question) +process action:write sessionId:XXX data:"y" + +# Kill if needed +process action:kill sessionId:XXX +``` + +**Why workdir matters:** Agent wakes up in a focused directory, doesn't wander off reading unrelated files (like your soul.md 😅). + +--- + +## Codex CLI + +**Model:** `gpt-5.3-codex` is the default (set in ~/.codex/config.toml) + +### Building/Creating (use --full-auto or --yolo) +```bash +# --full-auto: sandboxed but auto-approves in workspace +bash workdir:~/project background:true command:"codex exec --full-auto \"Build a snake game with dark theme\"" + +# --yolo: NO sandbox, NO approvals (fastest, most dangerous) +bash workdir:~/project background:true command:"codex --yolo \"Build a snake game with dark theme\"" + +# Note: --yolo is a shortcut for --dangerously-bypass-approvals-and-sandbox +``` + +### Reviewing PRs (vanilla, no flags) + +**⚠️ CRITICAL: Never review PRs in Clawdbot's own project folder!** +- Either use the project where the PR is submitted (if it's NOT ~/Projects/clawdbot) +- Or clone to a temp folder first + +```bash +# Option 1: Review in the actual project (if NOT clawdbot) +bash workdir:~/Projects/some-other-repo background:true command:"codex review --base main" + +# Option 2: Clone to temp folder for safe review (REQUIRED for clawdbot PRs!) 
+REVIEW_DIR=$(mktemp -d) +git clone https://github.com/clawdbot/clawdbot.git $REVIEW_DIR +cd $REVIEW_DIR && gh pr checkout 130 +bash workdir:$REVIEW_DIR background:true command:"codex review --base origin/main" +# Clean up after: rm -rf $REVIEW_DIR + +# Option 3: Use git worktree (keeps main intact) +git worktree add /tmp/pr-130-review pr-130-branch +bash workdir:/tmp/pr-130-review background:true command:"codex review --base main" +``` + +**Why?** Checking out branches in the running Clawdbot repo can break the live instance! + +### Batch PR Reviews (parallel army!) +```bash +# Fetch all PR refs first +git fetch origin '+refs/pull/*/head:refs/remotes/origin/pr/*' + +# Deploy the army - one Codex per PR! +bash workdir:~/project background:true command:"codex exec \"Review PR #86. git diff origin/main...origin/pr/86\"" +bash workdir:~/project background:true command:"codex exec \"Review PR #87. git diff origin/main...origin/pr/87\"" +bash workdir:~/project background:true command:"codex exec \"Review PR #95. git diff origin/main...origin/pr/95\"" +# ... 
repeat for all PRs + +# Monitor all +process action:list + +# Get results and post to GitHub +process action:log sessionId:XXX +gh pr comment --body "" +``` + +### Tips for PR Reviews +- **Fetch refs first:** `git fetch origin '+refs/pull/*/head:refs/remotes/origin/pr/*'` +- **Use git diff:** Tell Codex to use `git diff origin/main...origin/pr/XX` +- **Don't checkout:** Multiple parallel reviews = don't let them change branches +- **Post results:** Use `gh pr comment` to post reviews to GitHub + +--- + +## Claude Code + +```bash +bash workdir:~/project background:true command:"claude \"Your task\"" +``` + +--- + +## OpenCode + +```bash +bash workdir:~/project background:true command:"opencode run \"Your task\"" +``` + +--- + +## Pi Coding Agent + +```bash +# Install: npm install -g @mariozechner/pi-coding-agent +bash workdir:~/project background:true command:"pi \"Your task\"" +``` + +--- + +## Pi flags (common) + +- `--print` / `-p`: non-interactive; runs prompt and exits. +- `--provider `: pick provider (default: google). +- `--model `: pick model (default: gemini-2.5-flash). +- `--api-key `: override API key (defaults to env vars). + +Examples: + +```bash +# Set provider + model, non-interactive +bash workdir:~/project background:true command:"pi --provider openai --model gpt-4o-mini -p \"Summarize src/\"" +``` + +--- + +## tmux (interactive sessions) + +Use the tmux skill for interactive coding sessions (always, except very simple one-shot prompts). Prefer bash background mode for non-interactive runs. + +--- + +## Parallel Issue Fixing with git worktrees + tmux + +For fixing multiple issues in parallel, use git worktrees (isolated branches) + tmux sessions: + +```bash +# 1. Clone repo to temp location +cd /tmp && git clone git@github.com:user/repo.git repo-worktrees +cd repo-worktrees + +# 2. Create worktrees for each issue (isolated branches!) +git worktree add -b fix/issue-78 /tmp/issue-78 main +git worktree add -b fix/issue-99 /tmp/issue-99 main + +# 3. 
Set up tmux sessions +SOCKET="${TMPDIR:-/tmp}/codex-fixes.sock" +tmux -S "$SOCKET" new-session -d -s fix-78 +tmux -S "$SOCKET" new-session -d -s fix-99 + +# 4. Launch Codex in each (after pnpm install!) +tmux -S "$SOCKET" send-keys -t fix-78 "cd /tmp/issue-78 && pnpm install && codex --yolo 'Fix issue #78: . Commit and push.'" Enter +tmux -S "$SOCKET" send-keys -t fix-99 "cd /tmp/issue-99 && pnpm install && codex --yolo 'Fix issue #99: . Commit and push.'" Enter + +# 5. Monitor progress +tmux -S "$SOCKET" capture-pane -p -t fix-78 -S -30 +tmux -S "$SOCKET" capture-pane -p -t fix-99 -S -30 + +# 6. Check if done (prompt returned) +tmux -S "$SOCKET" capture-pane -p -t fix-78 -S -3 | grep -q "❯" && echo "Done!" + +# 7. Create PRs after fixes +cd /tmp/issue-78 && git push -u origin fix/issue-78 +gh pr create --repo user/repo --head fix/issue-78 --title "fix: ..." --body "..." + +# 8. Cleanup +tmux -S "$SOCKET" kill-server +git worktree remove /tmp/issue-78 +git worktree remove /tmp/issue-99 +``` + +**Why worktrees?** Each Codex works in isolated branch, no conflicts. Can run 5+ parallel fixes! + +**Why tmux over bash background?** Codex is interactive — needs TTY for proper output. tmux provides persistent sessions with full history capture. + +--- + +## ⚠️ Rules + +1. **Respect tool choice** — if user asks for Codex, use Codex. NEVER offer to build it yourself! +2. **Be patient** — don't kill sessions because they're "slow" +3. **Monitor with process:log** — check progress without interfering +4. **--full-auto for building** — auto-approves changes +5. **vanilla for reviewing** — no special flags needed +6. **Parallel is OK** — run many Codex processes at once for batch work +7. **NEVER start Codex in ~/clawd/** — it'll read your soul docs and get weird ideas about the org chart! Use the target project dir or /tmp for blank slate chats +8. **NEVER checkout branches in ~/Projects/clawdbot/** — that's the LIVE Clawdbot instance! 
Clone to /tmp or use git worktree for PR reviews + +--- + +## PR Template (The Razor Standard) + +When submitting PRs to external repos, use this format for quality & maintainer-friendliness: + +````markdown +## Original Prompt +[Exact request/problem statement] + +## What this does +[High-level description] + +**Features:** +- [Key feature 1] +- [Key feature 2] + +**Example usage:** +```bash +# Example +command example +``` + +## Feature intent (maintainer-friendly) +[Why useful, how it fits, workflows it enables] + +## Prompt history (timestamped) +- YYYY-MM-DD HH:MM UTC: [Step 1] +- YYYY-MM-DD HH:MM UTC: [Step 2] + +## How I tested +**Manual verification:** +1. [Test step] - Output: `[result]` +2. [Test step] - Result: [result] + +**Files tested:** +- [Detail] +- [Edge cases] + +## Session logs (implementation) +- [What was researched] +- [What was discovered] +- [Time spent] + +## Implementation details +**New files:** +- `path/file.ts` - [description] + +**Modified files:** +- `path/file.ts` - [change] + +**Technical notes:** +- [Detail 1] +- [Detail 2] + +--- +*Submitted by Razor 🥷 - Mariano's AI agent* +```` + +**Key principles:** +1. Human-written description (no AI slop) +2. Feature intent for maintainers +3. Timestamped prompt history +4. Session logs if using Codex/agent + +**Example:** https://github.com/steipete/bird/pull/222 diff --git a/skills/github/SKILL.md b/skills/github/SKILL.md new file mode 100644 index 0000000..57d8127 --- /dev/null +++ b/skills/github/SKILL.md @@ -0,0 +1,48 @@ +--- +name: github +description: "Interact with GitHub using the `gh` CLI. Use `gh issue`, `gh pr`, `gh run`, and `gh api` for issues, PRs, CI runs, and advanced queries." 
+metadata: {"nanobot":{"emoji":"🐙","requires":{"bins":["gh"]},"install":[{"id":"brew","kind":"brew","formula":"gh","bins":["gh"],"label":"Install GitHub CLI (brew)"},{"id":"apt","kind":"apt","package":"gh","bins":["gh"],"label":"Install GitHub CLI (apt)"}]}} +--- + +# GitHub Skill + +Use the `gh` CLI to interact with GitHub. Always specify `--repo owner/repo` when not in a git directory, or use URLs directly. + +## Pull Requests + +Check CI status on a PR: +```bash +gh pr checks 55 --repo owner/repo +``` + +List recent workflow runs: +```bash +gh run list --repo owner/repo --limit 10 +``` + +View a run and see which steps failed: +```bash +gh run view --repo owner/repo +``` + +View logs for failed steps only: +```bash +gh run view --repo owner/repo --log-failed +``` + +## API for Advanced Queries + +The `gh api` command is useful for accessing data not available through other subcommands. + +Get PR with specific fields: +```bash +gh api repos/owner/repo/pulls/55 --jq '.title, .state, .user.login' +``` + +## JSON Output + +Most commands support `--json` for structured output. You can use `--jq` to filter: + +```bash +gh issue list --repo owner/repo --json number,title --jq '.[] | "\(.number): \(.title)"' +``` diff --git a/skills/healthcheck/SKILL.md b/skills/healthcheck/SKILL.md new file mode 100644 index 0000000..4471f04 --- /dev/null +++ b/skills/healthcheck/SKILL.md @@ -0,0 +1,245 @@ +--- +name: healthcheck +description: Host security hardening and risk-tolerance configuration for OpenClaw deployments. Use when a user asks for security audits, firewall/SSH/update hardening, risk posture, exposure review, OpenClaw cron scheduling for periodic checks, or version status checks on a machine running OpenClaw (laptop, workstation, Pi, VPS). +--- + +# OpenClaw Host Hardening + +## Overview + +Assess and harden the host running OpenClaw, then align it to a user-defined risk tolerance without breaking access. 
Use OpenClaw security tooling as a first-class signal, but treat OS hardening as a separate, explicit set of steps. + +## Core rules + +- Recommend running this skill with a state-of-the-art model (e.g., Opus 4.5, GPT 5.2+). The agent should self-check the current model and suggest switching if below that level; do not block execution. +- Require explicit approval before any state-changing action. +- Do not modify remote access settings without confirming how the user connects. +- Prefer reversible, staged changes with a rollback plan. +- Never claim OpenClaw changes the host firewall, SSH, or OS updates; it does not. +- If role/identity is unknown, provide recommendations only. +- Formatting: every set of user choices must be numbered so the user can reply with a single digit. +- System-level backups are recommended; try to verify status. + +## Workflow (follow in order) + +### 0) Model self-check (non-blocking) + +Before starting, check the current model. If it is below state-of-the-art (e.g., Opus 4.5, GPT 5.2+), recommend switching. Do not block execution. + +### 1) Establish context (read-only) + +Try to infer 1–5 from the environment before asking. Prefer simple, non-technical questions if you need confirmation. + +Determine (in order): + +1. OS and version (Linux/macOS/Windows), container vs host. +2. Privilege level (root/admin vs user). +3. Access path (local console, SSH, RDP, tailnet). +4. Network exposure (public IP, reverse proxy, tunnel). +5. OpenClaw gateway status and bind address. +6. Backup system and status (e.g., Time Machine, system images, snapshots). +7. Deployment context (local mac app, headless gateway host, remote gateway, container/CI). +8. Disk encryption status (FileVault/LUKS/BitLocker). +9. OS automatic security updates status. + Note: these are not blocking items, but are highly recommended, especially if OpenClaw can access sensitive data. +10. 
Usage mode for a personal assistant with full access (local workstation vs headless/remote vs other). + +First ask once for permission to run read-only checks. If granted, run them by default and only ask questions for items you cannot infer or verify. Do not ask for information already visible in runtime or command output. Keep the permission ask as a single sentence, and list follow-up info needed as an unordered list (not numbered) unless you are presenting selectable choices. + +If you must ask, use non-technical prompts: + +- “Are you using a Mac, Windows PC, or Linux?” +- “Are you logged in directly on the machine, or connecting from another computer?” +- “Is this machine reachable from the public internet, or only on your home/network?” +- “Do you have backups enabled (e.g., Time Machine), and are they current?” +- “Is disk encryption turned on (FileVault/BitLocker/LUKS)?” +- “Are automatic security updates enabled?” +- “How do you use this machine?” + Examples: + - Personal machine shared with the assistant + - Dedicated local machine for the assistant + - Dedicated remote machine/server accessed remotely (always on) + - Something else? + +Only ask for the risk profile after system context is known. + +If the user grants read-only permission, run the OS-appropriate checks by default. If not, offer them (numbered). Examples: + +1. OS: `uname -a`, `sw_vers`, `cat /etc/os-release`. +2. Listening ports: + - Linux: `ss -ltnup` (or `ss -ltnp` if `-u` unsupported). + - macOS: `lsof -nP -iTCP -sTCP:LISTEN`. +3. Firewall status: + - Linux: `ufw status`, `firewall-cmd --state`, `nft list ruleset` (pick what is installed). + - macOS: `/usr/libexec/ApplicationFirewall/socketfilterfw --getglobalstate` and `pfctl -s info`. +4. Backups (macOS): `tmutil status` (if Time Machine is used). + +### 2) Run OpenClaw security audits (read-only) + +As part of the default read-only checks, run `openclaw security audit --deep`. 
Only offer alternatives if the user requests them: + +1. `openclaw security audit` (faster, non-probing) +2. `openclaw security audit --json` (structured output) + +Offer to apply OpenClaw safe defaults (numbered): + +1. `openclaw security audit --fix` + +Be explicit that `--fix` only tightens OpenClaw defaults and file permissions. It does not change host firewall, SSH, or OS update policies. + +If browser control is enabled, recommend that 2FA be enabled on all important accounts, with hardware keys preferred and SMS not sufficient. + +### 3) Check OpenClaw version/update status (read-only) + +As part of the default read-only checks, run `openclaw update status`. + +Report the current channel and whether an update is available. + +### 4) Determine risk tolerance (after system context) + +Ask the user to pick or confirm a risk posture and any required open services/ports (numbered choices below). +Do not pigeonhole into fixed profiles; if the user prefers, capture requirements instead of choosing a profile. +Offer suggested profiles as optional defaults (numbered). Note that most users pick Home/Workstation Balanced: + +1. Home/Workstation Balanced (most common): firewall on with reasonable defaults, remote access restricted to LAN or tailnet. +2. VPS Hardened: deny-by-default inbound firewall, minimal open ports, key-only SSH, no root login, automatic security updates. +3. Developer Convenience: more local services allowed, explicit exposure warnings, still audited. +4. Custom: user-defined constraints (services, exposure, update cadence, access methods). 
+ +### 5) Produce a remediation plan + +Provide a plan that includes: + +- Target profile +- Current posture summary +- Gaps vs target +- Step-by-step remediation with exact commands +- Access-preservation strategy and rollback +- Risks and potential lockout scenarios +- Least-privilege notes (e.g., avoid admin usage, tighten ownership/permissions where safe) +- Credential hygiene notes (location of OpenClaw creds, prefer disk encryption) + +Always show the plan before any changes. + +### 6) Offer execution options + +Offer one of these choices (numbered so users can reply with a single digit): + +1. Do it for me (guided, step-by-step approvals) +2. Show plan only +3. Fix only critical issues +4. Export commands for later + +### 7) Execute with confirmations + +For each step: + +- Show the exact command +- Explain impact and rollback +- Confirm access will remain available +- Stop on unexpected output and ask for guidance + +### 8) Verify and report + +Re-check: + +- Firewall status +- Listening ports +- Remote access still works +- OpenClaw security audit (re-run) + +Deliver a final posture report and note any deferred items. + +## Required confirmations (always) + +Require explicit approval for: + +- Firewall rule changes +- Opening/closing ports +- SSH/RDP configuration changes +- Installing/removing packages +- Enabling/disabling services +- User/group modifications +- Scheduling tasks or startup persistence +- Update policy changes +- Access to sensitive files or credentials + +If unsure, ask. + +## Periodic checks + +After OpenClaw install or first hardening pass, run at least one baseline audit and version check: + +- `openclaw security audit` +- `openclaw security audit --deep` +- `openclaw update status` + +Ongoing monitoring is recommended. Use the OpenClaw cron tool/CLI to schedule periodic audits (Gateway scheduler). Do not create scheduled tasks without explicit approval. Store outputs in a user-approved location and avoid secrets in logs. 
+When scheduling headless cron runs, include a note in the output that instructs the user to call `healthcheck` so issues can be fixed. + +### Required prompt to schedule (always) + +After any audit or hardening pass, explicitly offer scheduling and require a direct response. Use a short prompt like (numbered): + +1. “Do you want me to schedule periodic audits (e.g., daily/weekly) via `openclaw cron add`?” + +If the user says yes, ask for: + +- cadence (daily/weekly), preferred time window, and output location +- whether to also schedule `openclaw update status` + +Use a stable cron job name so updates are deterministic. Prefer exact names: + +- `healthcheck:security-audit` +- `healthcheck:update-status` + +Before creating, run `openclaw cron list` and match on the exact `name`. If a match is found, update it with `openclaw cron edit ...`. +If not found, create it with `openclaw cron add --name ...`. + +Also offer a periodic version check so the user can decide when to update (numbered): + +1. `openclaw update status` (preferred for source checkouts and channels) +2. `npm view openclaw version` (published npm version) + +## OpenClaw command accuracy + +Use only supported commands and flags: + +- `openclaw security audit [--deep] [--fix] [--json]` +- `openclaw status` / `openclaw status --deep` +- `openclaw health --json` +- `openclaw update status` +- `openclaw cron add|list|runs|run` + +Do not invent CLI flags or imply OpenClaw enforces host firewall/SSH policies. + +## Logging and audit trail + +Record: + +- Gateway identity and role +- Plan ID and timestamp +- Approved steps and exact commands +- Exit codes and files modified (best effort) + +Redact secrets. Never log tokens or full credential contents. + +## Memory writes (conditional) + +Only write to memory files when the user explicitly opts in and the session is a private/local workspace +(per `docs/reference/templates/AGENTS.md`). Otherwise provide a redacted, paste-ready summary the user can +decide to save elsewhere.
+ +Follow the durable-memory prompt format used by OpenClaw compaction: + +- Write lasting notes to `memory/YYYY-MM-DD.md`. + +After each audit/hardening run, if opted-in, append a short, dated summary to `memory/YYYY-MM-DD.md` +(what was checked, key findings, actions taken, any scheduled cron jobs, key decisions, +and all commands executed). Append-only: never overwrite existing entries. +Redact sensitive host details (usernames, hostnames, IPs, serials, service names, tokens). +If there are durable preferences or decisions (risk posture, allowed ports, update policy), +also update `MEMORY.md` (long-term memory is optional and only used in private sessions). + +If the session cannot write to the workspace, ask for permission or provide exact entries +the user can paste into the memory files. diff --git a/skills/skill-creator/SKILL.md b/skills/skill-creator/SKILL.md new file mode 100644 index 0000000..9b5eb6f --- /dev/null +++ b/skills/skill-creator/SKILL.md @@ -0,0 +1,371 @@ +--- +name: skill-creator +description: Create or update AgentSkills. Use when designing, structuring, or packaging skills with scripts, references, and assets. +--- + +# Skill Creator + +This skill provides guidance for creating effective skills. + +## About Skills + +Skills are modular, self-contained packages that extend the agent's capabilities by providing +specialized knowledge, workflows, and tools. Think of them as "onboarding guides" for specific +domains or tasks—they transform the agent from a general-purpose agent into a specialized agent +equipped with procedural knowledge that no model can fully possess. + +### What Skills Provide + +1. Specialized workflows - Multi-step procedures for specific domains +2. Tool integrations - Instructions for working with specific file formats or APIs +3. Domain expertise - Company-specific knowledge, schemas, business logic +4. 
Bundled resources - Scripts, references, and assets for complex and repetitive tasks + +## Core Principles + +### Concise is Key + +The context window is a public good. Skills share the context window with everything else the agent needs: system prompt, conversation history, other Skills' metadata, and the actual user request. + +**Default assumption: the agent is already very smart.** Only add context the agent doesn't already have. Challenge each piece of information: "Does the agent really need this explanation?" and "Does this paragraph justify its token cost?" + +Prefer concise examples over verbose explanations. + +### Set Appropriate Degrees of Freedom + +Match the level of specificity to the task's fragility and variability: + +**High freedom (text-based instructions)**: Use when multiple approaches are valid, decisions depend on context, or heuristics guide the approach. + +**Medium freedom (pseudocode or scripts with parameters)**: Use when a preferred pattern exists, some variation is acceptable, or configuration affects behavior. + +**Low freedom (specific scripts, few parameters)**: Use when operations are fragile and error-prone, consistency is critical, or a specific sequence must be followed. + +Think of the agent as exploring a path: a narrow bridge with cliffs needs specific guardrails (low freedom), while an open field allows many routes (high freedom). + +### Anatomy of a Skill + +Every skill consists of a required SKILL.md file and optional bundled resources: + +``` +skill-name/ +├── SKILL.md (required) +│ ├── YAML frontmatter metadata (required) +│ │ ├── name: (required) +│ │ └── description: (required) +│ └── Markdown instructions (required) +└── Bundled Resources (optional) + ├── scripts/ - Executable code (Python/Bash/etc.) + ├── references/ - Documentation intended to be loaded into context as needed + └── assets/ - Files used in output (templates, icons, fonts, etc.) 
+``` + +#### SKILL.md (required) + +Every SKILL.md consists of: + +- **Frontmatter** (YAML): Contains `name` and `description` fields. These are the only fields that the agent reads to determine when the skill gets used, thus it is very important to be clear and comprehensive in describing what the skill is, and when it should be used. +- **Body** (Markdown): Instructions and guidance for using the skill. Only loaded AFTER the skill triggers (if at all). + +#### Bundled Resources (optional) + +##### Scripts (`scripts/`) + +Executable code (Python/Bash/etc.) for tasks that require deterministic reliability or are repeatedly rewritten. + +- **When to include**: When the same code is being rewritten repeatedly or deterministic reliability is needed +- **Example**: `scripts/rotate_pdf.py` for PDF rotation tasks +- **Benefits**: Token efficient, deterministic, may be executed without loading into context +- **Note**: Scripts may still need to be read by the agent for patching or environment-specific adjustments + +##### References (`references/`) + +Documentation and reference material intended to be loaded as needed into context to inform the agent's process and thinking. + +- **When to include**: For documentation that the agent should reference while working +- **Examples**: `references/finance.md` for financial schemas, `references/mnda.md` for company NDA template, `references/policies.md` for company policies, `references/api_docs.md` for API specifications +- **Use cases**: Database schemas, API documentation, domain knowledge, company policies, detailed workflow guides +- **Benefits**: Keeps SKILL.md lean, loaded only when the agent determines it's needed +- **Best practice**: If files are large (>10k words), include grep search patterns in SKILL.md +- **Avoid duplication**: Information should live in either SKILL.md or references files, not both. 
Prefer references files for detailed information unless it's truly core to the skill—this keeps SKILL.md lean while making information discoverable without hogging the context window. Keep only essential procedural instructions and workflow guidance in SKILL.md; move detailed reference material, schemas, and examples to references files. + +##### Assets (`assets/`) + +Files not intended to be loaded into context, but rather used within the output the agent produces. + +- **When to include**: When the skill needs files that will be used in the final output +- **Examples**: `assets/logo.png` for brand assets, `assets/slides.pptx` for PowerPoint templates, `assets/frontend-template/` for HTML/React boilerplate, `assets/font.ttf` for typography +- **Use cases**: Templates, images, icons, boilerplate code, fonts, sample documents that get copied or modified +- **Benefits**: Separates output resources from documentation, enables the agent to use files without loading them into context + +#### What to Not Include in a Skill + +A skill should only contain essential files that directly support its functionality. Do NOT create extraneous documentation or auxiliary files, including: + +- README.md +- INSTALLATION_GUIDE.md +- QUICK_REFERENCE.md +- CHANGELOG.md +- etc. + +The skill should only contain the information needed for an AI agent to do the job at hand. It should not contain auxiliary context about the process that went into creating it, setup and testing procedures, user-facing documentation, etc. Creating additional documentation files just adds clutter and confusion. + +### Progressive Disclosure Design Principle + +Skills use a three-level loading system to manage context efficiently: + +1. **Metadata (name + description)** - Always in context (~100 words) +2. **SKILL.md body** - When skill triggers (<5k words) +3. 
**Bundled resources** - As needed by the agent (Unlimited because scripts can be executed without reading into context window) + +#### Progressive Disclosure Patterns + +Keep SKILL.md body to the essentials and under 500 lines to minimize context bloat. Split content into separate files when approaching this limit. When splitting out content into other files, it is very important to reference them from SKILL.md and describe clearly when to read them, to ensure the reader of the skill knows they exist and when to use them. + +**Key principle:** When a skill supports multiple variations, frameworks, or options, keep only the core workflow and selection guidance in SKILL.md. Move variant-specific details (patterns, examples, configuration) into separate reference files. + +**Pattern 1: High-level guide with references** + +```markdown +# PDF Processing + +## Quick start + +Extract text with pdfplumber: +[code example] + +## Advanced features + +- **Form filling**: See [FORMS.md](FORMS.md) for complete guide +- **API reference**: See [REFERENCE.md](REFERENCE.md) for all methods +- **Examples**: See [EXAMPLES.md](EXAMPLES.md) for common patterns +``` + +the agent loads FORMS.md, REFERENCE.md, or EXAMPLES.md only when needed. + +**Pattern 2: Domain-specific organization** + +For Skills with multiple domains, organize content by domain to avoid loading irrelevant context: + +``` +bigquery-skill/ +├── SKILL.md (overview and navigation) +└── reference/ + ├── finance.md (revenue, billing metrics) + ├── sales.md (opportunities, pipeline) + ├── product.md (API usage, features) + └── marketing.md (campaigns, attribution) +``` + +When a user asks about sales metrics, the agent only reads sales.md. 
+ +Similarly, for skills supporting multiple frameworks or variants, organize by variant: + +``` +cloud-deploy/ +├── SKILL.md (workflow + provider selection) +└── references/ + ├── aws.md (AWS deployment patterns) + ├── gcp.md (GCP deployment patterns) + └── azure.md (Azure deployment patterns) +``` + +When the user chooses AWS, the agent only reads aws.md. + +**Pattern 3: Conditional details** + +Show basic content, link to advanced content: + +```markdown +# DOCX Processing + +## Creating documents + +Use docx-js for new documents. See [DOCX-JS.md](DOCX-JS.md). + +## Editing documents + +For simple edits, modify the XML directly. + +**For tracked changes**: See [REDLINING.md](REDLINING.md) +**For OOXML details**: See [OOXML.md](OOXML.md) +``` + +the agent reads REDLINING.md or OOXML.md only when the user needs those features. + +**Important guidelines:** + +- **Avoid deeply nested references** - Keep references one level deep from SKILL.md. All reference files should link directly from SKILL.md. +- **Structure longer reference files** - For files longer than 100 lines, include a table of contents at the top so the agent can see the full scope when previewing. + +## Skill Creation Process + +Skill creation involves these steps: + +1. Understand the skill with concrete examples +2. Plan reusable skill contents (scripts, references, assets) +3. Initialize the skill (run init_skill.py) +4. Edit the skill (implement resources and write SKILL.md) +5. Package the skill (run package_skill.py) +6. Iterate based on real usage + +Follow these steps in order, skipping only if there is a clear reason why they are not applicable. + +### Skill Naming + +- Use lowercase letters, digits, and hyphens only; normalize user-provided titles to hyphen-case (e.g., "Plan Mode" -> `plan-mode`). +- When generating names, generate a name under 64 characters (letters, digits, hyphens). +- Prefer short, verb-led phrases that describe the action. 
+- Namespace by tool when it improves clarity or triggering (e.g., `gh-address-comments`, `linear-address-issue`). +- Name the skill folder exactly after the skill name. + +### Step 1: Understanding the Skill with Concrete Examples + +Skip this step only when the skill's usage patterns are already clearly understood. It remains valuable even when working with an existing skill. + +To create an effective skill, clearly understand concrete examples of how the skill will be used. This understanding can come from either direct user examples or generated examples that are validated with user feedback. + +For example, when building an image-editor skill, relevant questions include: + +- "What functionality should the image-editor skill support? Editing, rotating, anything else?" +- "Can you give some examples of how this skill would be used?" +- "I can imagine users asking for things like 'Remove the red-eye from this image' or 'Rotate this image'. Are there other ways you imagine this skill being used?" +- "What would a user say that should trigger this skill?" + +To avoid overwhelming users, avoid asking too many questions in a single message. Start with the most important questions and follow up as needed for better effectiveness. + +Conclude this step when there is a clear sense of the functionality the skill should support. + +### Step 2: Planning the Reusable Skill Contents + +To turn concrete examples into an effective skill, analyze each example by: + +1. Considering how to execute on the example from scratch +2. Identifying what scripts, references, and assets would be helpful when executing these workflows repeatedly + +Example: When building a `pdf-editor` skill to handle queries like "Help me rotate this PDF," the analysis shows: + +1. Rotating a PDF requires re-writing the same code each time +2. 
A `scripts/rotate_pdf.py` script would be helpful to store in the skill + +Example: When designing a `frontend-webapp-builder` skill for queries like "Build me a todo app" or "Build me a dashboard to track my steps," the analysis shows: + +1. Writing a frontend webapp requires the same boilerplate HTML/React each time +2. An `assets/hello-world/` template containing the boilerplate HTML/React project files would be helpful to store in the skill + +Example: When building a `big-query` skill to handle queries like "How many users have logged in today?" the analysis shows: + +1. Querying BigQuery requires re-discovering the table schemas and relationships each time +2. A `references/schema.md` file documenting the table schemas would be helpful to store in the skill + +To establish the skill's contents, analyze each concrete example to create a list of the reusable resources to include: scripts, references, and assets. + +### Step 3: Initializing the Skill + +At this point, it is time to actually create the skill. + +Skip this step only if the skill being developed already exists, and iteration or packaging is needed. In this case, continue to the next step. + +When creating a new skill from scratch, always run the `init_skill.py` script. The script conveniently generates a new template skill directory that automatically includes everything a skill requires, making the skill creation process much more efficient and reliable. 
+
+Usage:
+
+```bash
+scripts/init_skill.py <skill-name> --path <path> [--resources scripts,references,assets] [--examples]
+```
+
+Examples:
+
+```bash
+scripts/init_skill.py my-skill --path skills/public
+scripts/init_skill.py my-skill --path skills/public --resources scripts,references
+scripts/init_skill.py my-skill --path skills/public --resources scripts --examples
+```
+
+The script:
+
+- Creates the skill directory at the specified path
+- Generates a SKILL.md template with proper frontmatter and TODO placeholders
+- Optionally creates resource directories based on `--resources`
+- Optionally adds example files when `--examples` is set
+
+After initialization, customize the SKILL.md and add resources as needed. If you used `--examples`, replace or delete placeholder files.
+
+### Step 4: Edit the Skill
+
+When editing the (newly-generated or existing) skill, remember that the skill is being created for another instance of the agent to use. Include information that would be beneficial and non-obvious to the agent. Consider what procedural knowledge, domain-specific details, or reusable assets would help another agent instance execute these tasks more effectively.
+
+#### Learn Proven Design Patterns
+
+Consult these helpful guides based on your skill's needs:
+
+- **Multi-step processes**: See references/workflows.md for sequential workflows and conditional logic
+- **Specific output formats or quality standards**: See references/output-patterns.md for template and example patterns
+
+These files contain established best practices for effective skill design.
+
+#### Start with Reusable Skill Contents
+
+To begin implementation, start with the reusable resources identified above: `scripts/`, `references/`, and `assets/` files. Note that this step may require user input. For example, when implementing a `brand-guidelines` skill, the user may need to provide brand assets or templates to store in `assets/`, or documentation to store in `references/`.
+ +Added scripts must be tested by actually running them to ensure there are no bugs and that the output matches what is expected. If there are many similar scripts, only a representative sample needs to be tested to ensure confidence that they all work while balancing time to completion. + +If you used `--examples`, delete any placeholder files that are not needed for the skill. Only create resource directories that are actually required. + +#### Update SKILL.md + +**Writing Guidelines:** Always use imperative/infinitive form. + +##### Frontmatter + +Write the YAML frontmatter with `name` and `description`: + +- `name`: The skill name +- `description`: This is the primary triggering mechanism for your skill, and helps the agent understand when to use the skill. + - Include both what the Skill does and specific triggers/contexts for when to use it. + - Include all "when to use" information here - Not in the body. The body is only loaded after triggering, so "When to Use This Skill" sections in the body are not helpful to the agent. + - Example description for a `docx` skill: "Comprehensive document creation, editing, and analysis with support for tracked changes, comments, formatting preservation, and text extraction. Use when the agent needs to work with professional documents (.docx files) for: (1) Creating new documents, (2) Modifying or editing content, (3) Working with tracked changes, (4) Adding comments, or any other document tasks" + +Do not include any other fields in YAML frontmatter. + +##### Body + +Write instructions for using the skill and its bundled resources. + +### Step 5: Packaging a Skill + +Once development of the skill is complete, it must be packaged into a distributable .skill file that gets shared with the user. 
The packaging process automatically validates the skill first to ensure it meets all requirements:
+
+```bash
+scripts/package_skill.py <skill-directory>
+```
+
+Optional output directory specification:
+
+```bash
+scripts/package_skill.py <skill-directory> ./dist
+```
+
+The packaging script will:
+
+1. **Validate** the skill automatically, checking:
+
+   - YAML frontmatter format and required fields
+   - Skill naming conventions and directory structure
+   - Description completeness and quality
+   - File organization and resource references
+
+2. **Package** the skill if validation passes, creating a .skill file named after the skill (e.g., `my-skill.skill`) that includes all files and maintains the proper directory structure for distribution. The .skill file is a zip file with a .skill extension.
+
+If validation fails, the script will report the errors and exit without creating a package. Fix any validation errors and run the packaging command again.
+
+### Step 6: Iterate
+
+After testing the skill, users may request improvements. Often this happens right after using the skill, with fresh context of how the skill performed.
+
+**Iteration workflow:**
+
+1. Use the skill on real tasks
+2. Notice struggles or inefficiencies
+3. Identify how SKILL.md or bundled resources should be updated
+4. Implement changes and test again
diff --git a/skills/tmux/SKILL.md b/skills/tmux/SKILL.md
new file mode 100644
index 0000000..f2a3144
--- /dev/null
+++ b/skills/tmux/SKILL.md
@@ -0,0 +1,121 @@
+---
+name: tmux
+description: Remote-control tmux sessions for interactive CLIs by sending keystrokes and scraping pane output.
+metadata: {"nanobot":{"emoji":"🧵","os":["darwin","linux"],"requires":{"bins":["tmux"]}}}
+---
+
+# tmux Skill
+
+Use tmux only when you need an interactive TTY. Prefer exec background mode for long-running, non-interactive tasks.
+ +## Quickstart (isolated socket, exec tool) + +```bash +SOCKET_DIR="${NANOBOT_TMUX_SOCKET_DIR:-${TMPDIR:-/tmp}/nanobot-tmux-sockets}" +mkdir -p "$SOCKET_DIR" +SOCKET="$SOCKET_DIR/nanobot.sock" +SESSION=nanobot-python + +tmux -S "$SOCKET" new -d -s "$SESSION" -n shell +tmux -S "$SOCKET" send-keys -t "$SESSION":0.0 -- 'PYTHON_BASIC_REPL=1 python3 -q' Enter +tmux -S "$SOCKET" capture-pane -p -J -t "$SESSION":0.0 -S -200 +``` + +After starting a session, always print monitor commands: + +``` +To monitor: + tmux -S "$SOCKET" attach -t "$SESSION" + tmux -S "$SOCKET" capture-pane -p -J -t "$SESSION":0.0 -S -200 +``` + +## Socket convention + +- Use `NANOBOT_TMUX_SOCKET_DIR` environment variable. +- Default socket path: `"$NANOBOT_TMUX_SOCKET_DIR/nanobot.sock"`. + +## Targeting panes and naming + +- Target format: `session:window.pane` (defaults to `:0.0`). +- Keep names short; avoid spaces. +- Inspect: `tmux -S "$SOCKET" list-sessions`, `tmux -S "$SOCKET" list-panes -a`. + +## Finding sessions + +- List sessions on your socket: `{baseDir}/scripts/find-sessions.sh -S "$SOCKET"`. +- Scan all sockets: `{baseDir}/scripts/find-sessions.sh --all` (uses `NANOBOT_TMUX_SOCKET_DIR`). + +## Sending input safely + +- Prefer literal sends: `tmux -S "$SOCKET" send-keys -t target -l -- "$cmd"`. +- Control keys: `tmux -S "$SOCKET" send-keys -t target C-c`. + +## Watching output + +- Capture recent history: `tmux -S "$SOCKET" capture-pane -p -J -t target -S -200`. +- Wait for prompts: `{baseDir}/scripts/wait-for-text.sh -t session:0.0 -p 'pattern'`. +- Attaching is OK; detach with `Ctrl+b d`. + +## Spawning processes + +- For python REPLs, set `PYTHON_BASIC_REPL=1` (non-basic REPL breaks send-keys flows). + +## Windows / WSL + +- tmux is supported on macOS/Linux. On Windows, use WSL and install tmux inside WSL. +- This skill is gated to `darwin`/`linux` and requires `tmux` on PATH. 
+ +## Orchestrating Coding Agents (Codex, Claude Code) + +tmux excels at running multiple coding agents in parallel: + +```bash +SOCKET="${TMPDIR:-/tmp}/codex-army.sock" + +# Create multiple sessions +for i in 1 2 3 4 5; do + tmux -S "$SOCKET" new-session -d -s "agent-$i" +done + +# Launch agents in different workdirs +tmux -S "$SOCKET" send-keys -t agent-1 "cd /tmp/project1 && codex --yolo 'Fix bug X'" Enter +tmux -S "$SOCKET" send-keys -t agent-2 "cd /tmp/project2 && codex --yolo 'Fix bug Y'" Enter + +# Poll for completion (check if prompt returned) +for sess in agent-1 agent-2; do + if tmux -S "$SOCKET" capture-pane -p -t "$sess" -S -3 | grep -q "❯"; then + echo "$sess: DONE" + else + echo "$sess: Running..." + fi +done + +# Get full output from completed session +tmux -S "$SOCKET" capture-pane -p -t agent-1 -S -500 +``` + +**Tips:** +- Use separate git worktrees for parallel fixes (no branch conflicts) +- `pnpm install` first before running codex in fresh clones +- Check for shell prompt (`❯` or `$`) to detect completion +- Codex needs `--yolo` or `--full-auto` for non-interactive fixes + +## Cleanup + +- Kill a session: `tmux -S "$SOCKET" kill-session -t "$SESSION"`. +- Kill all sessions on a socket: `tmux -S "$SOCKET" list-sessions -F '#{session_name}' | xargs -r -n1 tmux -S "$SOCKET" kill-session -t`. +- Remove everything on the private socket: `tmux -S "$SOCKET" kill-server`. + +## Helper: wait-for-text.sh + +`{baseDir}/scripts/wait-for-text.sh` polls a pane for a regex (or fixed string) with a timeout. 
+ +```bash +{baseDir}/scripts/wait-for-text.sh -t session:0.0 -p 'pattern' [-F] [-T 20] [-i 0.5] [-l 2000] +``` + +- `-t`/`--target` pane target (required) +- `-p`/`--pattern` regex to match (required); add `-F` for fixed string +- `-T` timeout seconds (integer, default 15) +- `-i` poll interval seconds (default 0.5) +- `-l` history lines to search (integer, default 1000) diff --git a/skills/tmux/scripts/find-sessions.sh b/skills/tmux/scripts/find-sessions.sh new file mode 100755 index 0000000..00552c6 --- /dev/null +++ b/skills/tmux/scripts/find-sessions.sh @@ -0,0 +1,112 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat <<'USAGE' +Usage: find-sessions.sh [-L socket-name|-S socket-path|-A] [-q pattern] + +List tmux sessions on a socket (default tmux socket if none provided). + +Options: + -L, --socket tmux socket name (passed to tmux -L) + -S, --socket-path tmux socket path (passed to tmux -S) + -A, --all scan all sockets under NANOBOT_TMUX_SOCKET_DIR + -q, --query case-insensitive substring to filter session names + -h, --help show this help +USAGE +} + +socket_name="" +socket_path="" +query="" +scan_all=false +socket_dir="${NANOBOT_TMUX_SOCKET_DIR:-${TMPDIR:-/tmp}/nanobot-tmux-sockets}" + +while [[ $# -gt 0 ]]; do + case "$1" in + -L|--socket) socket_name="${2-}"; shift 2 ;; + -S|--socket-path) socket_path="${2-}"; shift 2 ;; + -A|--all) scan_all=true; shift ;; + -q|--query) query="${2-}"; shift 2 ;; + -h|--help) usage; exit 0 ;; + *) echo "Unknown option: $1" >&2; usage; exit 1 ;; + esac +done + +if [[ "$scan_all" == true && ( -n "$socket_name" || -n "$socket_path" ) ]]; then + echo "Cannot combine --all with -L or -S" >&2 + exit 1 +fi + +if [[ -n "$socket_name" && -n "$socket_path" ]]; then + echo "Use either -L or -S, not both" >&2 + exit 1 +fi + +if ! command -v tmux >/dev/null 2>&1; then + echo "tmux not found in PATH" >&2 + exit 1 +fi + +list_sessions() { + local label="$1"; shift + local tmux_cmd=(tmux "$@") + + if ! 
sessions="$("${tmux_cmd[@]}" list-sessions -F '#{session_name}\t#{session_attached}\t#{session_created_string}' 2>/dev/null)"; then + echo "No tmux server found on $label" >&2 + return 1 + fi + + if [[ -n "$query" ]]; then + sessions="$(printf '%s\n' "$sessions" | grep -i -- "$query" || true)" + fi + + if [[ -z "$sessions" ]]; then + echo "No sessions found on $label" + return 0 + fi + + echo "Sessions on $label:" + printf '%s\n' "$sessions" | while IFS=$'\t' read -r name attached created; do + attached_label=$([[ "$attached" == "1" ]] && echo "attached" || echo "detached") + printf ' - %s (%s, started %s)\n' "$name" "$attached_label" "$created" + done +} + +if [[ "$scan_all" == true ]]; then + if [[ ! -d "$socket_dir" ]]; then + echo "Socket directory not found: $socket_dir" >&2 + exit 1 + fi + + shopt -s nullglob + sockets=("$socket_dir"/*) + shopt -u nullglob + + if [[ "${#sockets[@]}" -eq 0 ]]; then + echo "No sockets found under $socket_dir" >&2 + exit 1 + fi + + exit_code=0 + for sock in "${sockets[@]}"; do + if [[ ! -S "$sock" ]]; then + continue + fi + list_sessions "socket path '$sock'" -S "$sock" || exit_code=$? + done + exit "$exit_code" +fi + +tmux_cmd=(tmux) +socket_label="default socket" + +if [[ -n "$socket_name" ]]; then + tmux_cmd+=(-L "$socket_name") + socket_label="socket name '$socket_name'" +elif [[ -n "$socket_path" ]]; then + tmux_cmd+=(-S "$socket_path") + socket_label="socket path '$socket_path'" +fi + +list_sessions "$socket_label" "${tmux_cmd[@]:1}" diff --git a/skills/tmux/scripts/wait-for-text.sh b/skills/tmux/scripts/wait-for-text.sh new file mode 100755 index 0000000..56354be --- /dev/null +++ b/skills/tmux/scripts/wait-for-text.sh @@ -0,0 +1,83 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat <<'USAGE' +Usage: wait-for-text.sh -t target -p pattern [options] + +Poll a tmux pane for text and exit when found. 
+ +Options: + -t, --target tmux target (session:window.pane), required + -p, --pattern regex pattern to look for, required + -F, --fixed treat pattern as a fixed string (grep -F) + -T, --timeout seconds to wait (integer, default: 15) + -i, --interval poll interval in seconds (default: 0.5) + -l, --lines number of history lines to inspect (integer, default: 1000) + -h, --help show this help +USAGE +} + +target="" +pattern="" +grep_flag="-E" +timeout=15 +interval=0.5 +lines=1000 + +while [[ $# -gt 0 ]]; do + case "$1" in + -t|--target) target="${2-}"; shift 2 ;; + -p|--pattern) pattern="${2-}"; shift 2 ;; + -F|--fixed) grep_flag="-F"; shift ;; + -T|--timeout) timeout="${2-}"; shift 2 ;; + -i|--interval) interval="${2-}"; shift 2 ;; + -l|--lines) lines="${2-}"; shift 2 ;; + -h|--help) usage; exit 0 ;; + *) echo "Unknown option: $1" >&2; usage; exit 1 ;; + esac +done + +if [[ -z "$target" || -z "$pattern" ]]; then + echo "target and pattern are required" >&2 + usage + exit 1 +fi + +if ! [[ "$timeout" =~ ^[0-9]+$ ]]; then + echo "timeout must be an integer number of seconds" >&2 + exit 1 +fi + +if ! [[ "$lines" =~ ^[0-9]+$ ]]; then + echo "lines must be an integer" >&2 + exit 1 +fi + +if ! 
command -v tmux >/dev/null 2>&1; then + echo "tmux not found in PATH" >&2 + exit 1 +fi + +# End time in epoch seconds (integer, good enough for polling) +start_epoch=$(date +%s) +deadline=$((start_epoch + timeout)) + +while true; do + # -J joins wrapped lines, -S uses negative index to read last N lines + pane_text="$(tmux capture-pane -p -J -t "$target" -S "-${lines}" 2>/dev/null || true)" + + if printf '%s\n' "$pane_text" | grep $grep_flag -- "$pattern" >/dev/null 2>&1; then + exit 0 + fi + + now=$(date +%s) + if (( now >= deadline )); then + echo "Timed out after ${timeout}s waiting for pattern: $pattern" >&2 + echo "Last ${lines} lines from $target:" >&2 + printf '%s\n' "$pane_text" >&2 + exit 1 + fi + + sleep "$interval" +done diff --git a/skills/video-frames/SKILL.md b/skills/video-frames/SKILL.md new file mode 100644 index 0000000..0aca9fb --- /dev/null +++ b/skills/video-frames/SKILL.md @@ -0,0 +1,46 @@ +--- +name: video-frames +description: Extract frames or short clips from videos using ffmpeg. +homepage: https://ffmpeg.org +metadata: + { + "openclaw": + { + "emoji": "🎞️", + "requires": { "bins": ["ffmpeg"] }, + "install": + [ + { + "id": "brew", + "kind": "brew", + "formula": "ffmpeg", + "bins": ["ffmpeg"], + "label": "Install ffmpeg (brew)", + }, + ], + }, + } +--- + +# Video Frames (ffmpeg) + +Extract a single frame from a video, or create quick thumbnails for inspection. + +## Quick start + +First frame: + +```bash +{baseDir}/scripts/frame.sh /path/to/video.mp4 --out /tmp/frame.jpg +``` + +At a timestamp: + +```bash +{baseDir}/scripts/frame.sh /path/to/video.mp4 --time 00:00:10 --out /tmp/frame-10s.jpg +``` + +## Notes + +- Prefer `--time` for “what is happening around here?”. +- Use a `.jpg` for quick share; use `.png` for crisp UI frames. 
diff --git a/skills/video-frames/scripts/frame.sh b/skills/video-frames/scripts/frame.sh new file mode 100755 index 0000000..31b3adb --- /dev/null +++ b/skills/video-frames/scripts/frame.sh @@ -0,0 +1,81 @@ +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat >&2 <<'EOF' +Usage: + frame.sh [--time HH:MM:SS] [--index N] --out /path/to/frame.jpg + +Examples: + frame.sh video.mp4 --out /tmp/frame.jpg + frame.sh video.mp4 --time 00:00:10 --out /tmp/frame-10s.jpg + frame.sh video.mp4 --index 0 --out /tmp/frame0.png +EOF + exit 2 +} + +if [[ "${1:-}" == "" || "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then + usage +fi + +in="${1:-}" +shift || true + +time="" +index="" +out="" + +while [[ $# -gt 0 ]]; do + case "$1" in + --time) + time="${2:-}" + shift 2 + ;; + --index) + index="${2:-}" + shift 2 + ;; + --out) + out="${2:-}" + shift 2 + ;; + *) + echo "Unknown arg: $1" >&2 + usage + ;; + esac +done + +if [[ ! -f "$in" ]]; then + echo "File not found: $in" >&2 + exit 1 +fi + +if [[ "$out" == "" ]]; then + echo "Missing --out" >&2 + usage +fi + +mkdir -p "$(dirname "$out")" + +if [[ "$index" != "" ]]; then + ffmpeg -hide_banner -loglevel error -y \ + -i "$in" \ + -vf "select=eq(n\\,${index})" \ + -vframes 1 \ + "$out" +elif [[ "$time" != "" ]]; then + ffmpeg -hide_banner -loglevel error -y \ + -ss "$time" \ + -i "$in" \ + -frames:v 1 \ + "$out" +else + ffmpeg -hide_banner -loglevel error -y \ + -i "$in" \ + -vf "select=eq(n\\,0)" \ + -vframes 1 \ + "$out" +fi + +echo "$out" diff --git a/skills/weather/SKILL.md b/skills/weather/SKILL.md new file mode 100644 index 0000000..8073de1 --- /dev/null +++ b/skills/weather/SKILL.md @@ -0,0 +1,49 @@ +--- +name: weather +description: Get current weather and forecasts (no API key required). +homepage: https://wttr.in/:help +metadata: {"nanobot":{"emoji":"🌤️","requires":{"bins":["curl"]}}} +--- + +# Weather + +Two free services, no API keys needed. 
+
+## wttr.in (primary)
+
+Quick one-liner:
+```bash
+curl -s "wttr.in/London?format=3"
+# Output: London: ⛅️ +8°C
+```
+
+Compact format:
+```bash
+curl -s "wttr.in/London?format=%l:+%c+%t+%h+%w"
+# Output: London: ⛅️ +8°C 71% ↙5km/h
+```
+
+Full forecast:
+```bash
+curl -s "wttr.in/London?T"
+```
+
+Format codes: `%c` condition · `%t` temp · `%h` humidity · `%w` wind · `%l` location · `%m` moon
+
+Tips:
+- URL-encode spaces: `wttr.in/New+York`
+- Airport codes: `wttr.in/JFK`
+- Units: `?m` (metric) `?u` (USCS)
+- Today only: `?1` · Current only: `?0`
+- PNG: `curl -s "wttr.in/Berlin.png" -o /tmp/weather.png`
+
+## Open-Meteo (fallback, JSON)
+
+Free, no key, good for programmatic use:
+```bash
+curl -s "https://api.open-meteo.com/v1/forecast?latitude=51.5&longitude=-0.12&current_weather=true"
+```
+
+Find coordinates for a city, then query. Returns JSON with temp, windspeed, weathercode.
+
+Docs: https://open-meteo.com/en/docs