optimize session memory and language routing for migration

This commit is contained in:
DBT
2026-02-23 09:49:49 +00:00
parent a3097c4c30
commit e038c00ebc
6 changed files with 658 additions and 5812 deletions

View File

@@ -163,7 +163,7 @@ func (cb *ContextBuilder) LoadBootstrapFiles() string {
return result
}
func (cb *ContextBuilder) BuildMessages(history []providers.Message, summary string, currentMessage string, media []string, channel, chatID string) []providers.Message {
func (cb *ContextBuilder) BuildMessages(history []providers.Message, summary string, currentMessage string, media []string, channel, chatID, responseLanguage string) []providers.Message {
messages := []providers.Message{}
systemPrompt := cb.BuildSystemPrompt()
@@ -173,6 +173,9 @@ func (cb *ContextBuilder) BuildMessages(history []providers.Message, summary str
if channel != "" && chatID != "" {
systemPrompt += fmt.Sprintf("\n\n## Current Session\nChannel: %s\nChat ID: %s", channel, chatID)
}
if responseLanguage != "" {
systemPrompt += fmt.Sprintf("\n\n## Response Language\nReply in %s unless user explicitly asks to switch language. Keep code identifiers and CLI commands unchanged.", responseLanguage)
}
// Log system prompt summary for debugging (debug mode only)
logger.DebugCF("agent", "System prompt built",

108
pkg/agent/language.go Normal file
View File

@@ -0,0 +1,108 @@
package agent
import "strings"
// DetectResponseLanguage resolves the BCP-47 style language tag that governs
// the reply policy. Resolution order: explicit user preference, then the
// language detected from the current user text, then the language recorded
// for the last session, and finally English as the default.
func DetectResponseLanguage(userText, preferred, last string) string {
	candidates := []string{
		normalizeLang(preferred),
		detectFromText(userText),
		normalizeLang(last),
	}
	for _, tag := range candidates {
		if tag != "" {
			return tag
		}
	}
	return "en"
}
// detectFromText guesses a language tag from the script of the given text.
// It counts characters per Unicode script and returns the first matching tag,
// or "" when the text is empty or contains no recognizable letters.
//
// Kana is checked before Han ideographs on purpose: Japanese sentences
// routinely mix kanji (CJK Unified Ideographs, shared with Chinese) with
// hiragana/katakana, so any kana present is a strong Japanese signal, while
// Han characters alone default to Chinese.
func detectFromText(text string) string {
	text = strings.TrimSpace(text)
	if text == "" {
		return ""
	}
	var han, kana, hangul, latin int
	for _, r := range text {
		switch {
		case r >= 0x4E00 && r <= 0x9FFF:
			// CJK Unified Ideographs — used by both Chinese and Japanese.
			han++
		case r >= 0x3040 && r <= 0x30FF:
			// Hiragana (U+3040–U+309F) and Katakana (U+30A0–U+30FF) — Japanese only.
			kana++
		case r >= 0xAC00 && r <= 0xD7AF:
			// Hangul syllables — Korean.
			hangul++
		case (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z'):
			latin++
		}
	}
	switch {
	case kana > 0:
		return "ja"
	case han > 0:
		return "zh-CN"
	case hangul > 0:
		return "ko"
	case latin > 0:
		return "en"
	}
	return ""
}
// normalizeLang canonicalizes a user-supplied language identifier into the
// small set of tags the agent uses ("zh-CN", "en", "ja", "ko"). Unknown but
// non-empty values pass through lowercased; empty input yields "".
func normalizeLang(lang string) string {
	canonical := strings.ToLower(strings.TrimSpace(lang))
	if canonical == "" {
		return ""
	}
	aliases := map[string]string{
		"zh": "zh-CN", "zh-cn": "zh-CN", "zh_hans": "zh-CN", "chinese": "zh-CN",
		"en": "en", "en-us": "en", "english": "en",
		"ja": "ja", "jp": "ja", "japanese": "ja",
		"ko": "ko", "kr": "ko", "korean": "ko",
	}
	if tag, known := aliases[canonical]; known {
		return tag
	}
	// Unrecognized tags are passed through so callers can still honor them.
	return canonical
}
// ExtractLanguagePreference detects an explicit user instruction to switch
// the reply language (e.g. "reply in english", "请用中文") and returns the
// corresponding tag, or "" when no hint phrase is found. English hints are
// matched first, then Chinese, Japanese, and Korean, mirroring the original
// precedence. All hint phrases are stored pre-lowercased, so the lowered
// input can be matched directly.
func ExtractLanguagePreference(text string) string {
	normalized := strings.ToLower(strings.TrimSpace(text))
	if normalized == "" {
		return ""
	}
	rules := []struct {
		tag   string
		hints []string
	}{
		{"en", []string{"speak english", "reply in english", "use english", "以后用英文", "请用英文", "用英文"}},
		{"zh-CN", []string{"说中文", "用中文", "请用中文", "reply in chinese", "speak chinese"}},
		{"ja", []string{"日本語", "reply in japanese", "speak japanese"}},
		{"ko", []string{"한국어", "reply in korean", "speak korean"}},
	}
	for _, rule := range rules {
		for _, hint := range rule.hints {
			if strings.Contains(normalized, hint) {
				return rule.tag
			}
		}
	}
	return ""
}

File diff suppressed because it is too large Load Diff

View File

@@ -10,113 +10,41 @@ import (
"fmt"
"os"
"path/filepath"
"strings"
"sync"
"time"
"clawgo/pkg/config"
"clawgo/pkg/logger"
)
const (
maxMemoryContextChars = 6000
maxLongTermMemoryChars = 2200
maxRecentNotesChars = 1600
maxMemoryLayerPartChars = 1200
maxMemoryDigestLines = 14
)
// MemoryStore manages persistent memory for the agent.
// - Long-term memory: memory/MEMORY.md
// - Daily notes: memory/YYYYMM/YYYYMMDD.md
// - Long-term memory: MEMORY.md (workspace root, compatible with OpenClaw)
// - Daily notes: memory/YYYY-MM-DD.md
// It also supports legacy locations for backward compatibility.
type MemoryStore struct {
workspace string
memoryDir string
memoryFile string
layered bool
recentDays int
includeProfile bool
includeProject bool
includeProcedure bool
mu sync.Mutex
workspace string
memoryDir string
memoryFile string
legacyMemoryFile string
}
// NewMemoryStore creates a new MemoryStore with the given workspace path.
// It ensures the memory directory exists.
func NewMemoryStore(workspace string, cfg config.MemoryConfig) *MemoryStore {
func NewMemoryStore(workspace string) *MemoryStore {
memoryDir := filepath.Join(workspace, "memory")
memoryFile := filepath.Join(memoryDir, "MEMORY.md")
memoryFile := filepath.Join(workspace, "MEMORY.md")
legacyMemoryFile := filepath.Join(memoryDir, "MEMORY.md")
// Ensure memory directory exists
if err := os.MkdirAll(memoryDir, 0755); err != nil {
logger.ErrorCF("memory", "Failed to create memory directory", map[string]interface{}{
"memory_dir": memoryDir,
logger.FieldError: err.Error(),
})
}
// Ensure MEMORY.md exists for first run (even without onboard).
if _, err := os.Stat(memoryFile); os.IsNotExist(err) {
initial := `# Long-term Memory
This file stores important information that should persist across sessions.
## User Information
(Important facts about user)
## Preferences
(User preferences learned over time)
## Important Notes
(Things to remember)
`
if writeErr := os.WriteFile(memoryFile, []byte(initial), 0644); writeErr != nil {
logger.ErrorCF("memory", "Failed to initialize MEMORY.md", map[string]interface{}{
"memory_file": memoryFile,
logger.FieldError: writeErr.Error(),
})
}
}
if cfg.Layered {
_ = os.MkdirAll(filepath.Join(memoryDir, "layers"), 0755)
ensureLayerFile(filepath.Join(memoryDir, "layers", "profile.md"), "# User Profile\n\nStable user profile, preferences, identity traits.\n")
ensureLayerFile(filepath.Join(memoryDir, "layers", "project.md"), "# Project Memory\n\nProject-specific architecture decisions and constraints.\n")
ensureLayerFile(filepath.Join(memoryDir, "layers", "procedures.md"), "# Procedures Memory\n\nReusable workflows, command recipes, and runbooks.\n")
}
recentDays := cfg.RecentDays
if recentDays <= 0 {
recentDays = 3
}
os.MkdirAll(memoryDir, 0755)
return &MemoryStore{
workspace: workspace,
memoryDir: memoryDir,
memoryFile: memoryFile,
layered: cfg.Layered,
recentDays: recentDays,
includeProfile: cfg.Layers.Profile,
includeProject: cfg.Layers.Project,
includeProcedure: cfg.Layers.Procedures,
legacyMemoryFile: legacyMemoryFile,
}
}
func ensureLayerFile(path, initial string) {
if _, err := os.Stat(path); os.IsNotExist(err) {
_ = os.WriteFile(path, []byte(initial), 0644)
}
}
// getTodayFile returns the path to today's daily note file (memory/YYYYMM/YYYYMMDD.md).
// getTodayFile returns the path to today's daily note file (memory/YYYY-MM-DD.md).
func (ms *MemoryStore) getTodayFile() string {
today := time.Now().Format("20060102") // YYYYMMDD
monthDir := today[:6] // YYYYMM
filePath := filepath.Join(ms.memoryDir, monthDir, today+".md")
return filePath
return filepath.Join(ms.memoryDir, time.Now().Format("2006-01-02")+".md")
}
// ReadLongTerm reads the long-term memory (MEMORY.md).
@@ -125,18 +53,15 @@ func (ms *MemoryStore) ReadLongTerm() string {
if data, err := os.ReadFile(ms.memoryFile); err == nil {
return string(data)
}
if data, err := os.ReadFile(ms.legacyMemoryFile); err == nil {
return string(data)
}
return ""
}
// WriteLongTerm writes content to the long-term memory file (MEMORY.md).
func (ms *MemoryStore) WriteLongTerm(content string) error {
ms.mu.Lock()
defer ms.mu.Unlock()
if err := os.MkdirAll(ms.memoryDir, 0755); err != nil {
return err
}
return atomicWriteFile(ms.memoryFile, []byte(content), 0644)
return os.WriteFile(ms.memoryFile, []byte(content), 0644)
}
// ReadToday reads today's daily note.
@@ -152,39 +77,27 @@ func (ms *MemoryStore) ReadToday() string {
// AppendToday appends content to today's daily note.
// If the file doesn't exist, it creates a new file with a date header.
func (ms *MemoryStore) AppendToday(content string) error {
ms.mu.Lock()
defer ms.mu.Unlock()
todayFile := ms.getTodayFile()
// Ensure month directory exists
monthDir := filepath.Dir(todayFile)
if err := os.MkdirAll(monthDir, 0755); err != nil {
return err
// Ensure memory directory exists
os.MkdirAll(ms.memoryDir, 0755)
var existingContent string
if data, err := os.ReadFile(todayFile); err == nil {
existingContent = string(data)
}
f, err := os.OpenFile(todayFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
return err
}
defer f.Close()
info, err := f.Stat()
if err != nil {
return err
}
payload := content
if info.Size() == 0 {
var newContent string
if existingContent == "" {
// Add header for new day
header := fmt.Sprintf("# %s\n\n", time.Now().Format("2006-01-02"))
payload = header + content
newContent = header + content
} else {
payload = "\n" + content
// Append to existing content
newContent = existingContent + "\n" + content
}
_, err = f.WriteString(payload)
return err
return os.WriteFile(todayFile, []byte(newContent), 0644)
}
// GetRecentDailyNotes returns daily notes from the last N days.
@@ -194,11 +107,18 @@ func (ms *MemoryStore) GetRecentDailyNotes(days int) string {
for i := 0; i < days; i++ {
date := time.Now().AddDate(0, 0, -i)
dateStr := date.Format("20060102") // YYYYMMDD
monthDir := dateStr[:6] // YYYYMM
filePath := filepath.Join(ms.memoryDir, monthDir, dateStr+".md")
if data, err := os.ReadFile(filePath); err == nil {
// Preferred format: memory/YYYY-MM-DD.md
newPath := filepath.Join(ms.memoryDir, date.Format("2006-01-02")+".md")
if data, err := os.ReadFile(newPath); err == nil {
notes = append(notes, string(data))
continue
}
// Backward-compatible format: memory/YYYYMM/YYYYMMDD.md
legacyDate := date.Format("20060102")
legacyPath := filepath.Join(ms.memoryDir, legacyDate[:6], legacyDate+".md")
if data, err := os.ReadFile(legacyPath); err == nil {
notes = append(notes, string(data))
}
}
@@ -223,21 +143,16 @@ func (ms *MemoryStore) GetRecentDailyNotes(days int) string {
func (ms *MemoryStore) GetMemoryContext() string {
var parts []string
if ms.layered {
layerParts := ms.getLayeredContext()
parts = append(parts, layerParts...)
}
// Long-term memory
longTerm := ms.ReadLongTerm()
if longTerm != "" {
parts = append(parts, "## Long-term Memory (Digest)\n\n"+compressMemoryForPrompt(longTerm, maxMemoryDigestLines, maxLongTermMemoryChars))
parts = append(parts, "## Long-term Memory\n\n"+longTerm)
}
// Recent daily notes
recentNotes := ms.GetRecentDailyNotes(ms.recentDays)
// Recent daily notes (last 3 days)
recentNotes := ms.GetRecentDailyNotes(3)
if recentNotes != "" {
parts = append(parts, "## Recent Daily Notes (Digest)\n\n"+compressMemoryForPrompt(recentNotes, maxMemoryDigestLines, maxRecentNotesChars))
parts = append(parts, "## Recent Daily Notes\n\n"+recentNotes)
}
if len(parts) == 0 {
@@ -252,122 +167,5 @@ func (ms *MemoryStore) GetMemoryContext() string {
}
result += part
}
return fmt.Sprintf("# Memory\n\n%s", truncateMemoryText(result, maxMemoryContextChars))
}
func (ms *MemoryStore) getLayeredContext() []string {
parts := []string{}
readLayer := func(filename, title string) {
data, err := os.ReadFile(filepath.Join(ms.memoryDir, "layers", filename))
if err != nil {
return
}
content := string(data)
if strings.TrimSpace(content) == "" {
return
}
parts = append(parts, fmt.Sprintf("## %s (Digest)\n\n%s", title, compressMemoryForPrompt(content, maxMemoryDigestLines, maxMemoryLayerPartChars)))
}
if ms.includeProfile {
readLayer("profile.md", "Memory Layer: Profile")
}
if ms.includeProject {
readLayer("project.md", "Memory Layer: Project")
}
if ms.includeProcedure {
readLayer("procedures.md", "Memory Layer: Procedures")
}
return parts
}
func truncateMemoryText(content string, maxChars int) string {
if maxChars <= 0 {
return strings.TrimSpace(content)
}
trimmed := strings.TrimSpace(content)
runes := []rune(trimmed)
if len(runes) <= maxChars {
return trimmed
}
suffix := "\n\n...[truncated]"
suffixRunes := []rune(suffix)
if maxChars <= len(suffixRunes) {
return string(runes[:maxChars])
}
return strings.TrimSpace(string(runes[:maxChars-len(suffixRunes)])) + suffix
}
// compressMemoryForPrompt condenses markdown memory into a short digest for
// the prompt: headings, bullet items, and numbered items are kept verbatim,
// while only the first line of each plain paragraph survives. The digest is
// capped at maxLines lines (defaulting to maxMemoryDigestLines) and then
// truncated to maxChars via truncateMemoryText.
func compressMemoryForPrompt(content string, maxLines, maxChars int) string {
	body := strings.TrimSpace(content)
	if body == "" {
		return ""
	}
	limit := maxLines
	if limit <= 0 {
		limit = maxMemoryDigestLines
	}
	allLines := strings.Split(body, "\n")
	digest := make([]string, 0, limit)
	skipContinuation := false
	for _, raw := range allLines {
		if len(digest) >= limit {
			break
		}
		line := strings.TrimSpace(raw)
		if line == "" {
			// Blank line ends the current paragraph.
			skipContinuation = false
			continue
		}
		structural := strings.HasPrefix(line, "#") ||
			strings.HasPrefix(line, "- ") ||
			strings.HasPrefix(line, "* ") ||
			isNumberedListLine(line)
		if structural {
			digest = append(digest, line)
			skipContinuation = false
		} else if !skipContinuation {
			// First line of a paragraph; drop the rest for compactness.
			digest = append(digest, line)
			skipContinuation = true
		}
	}
	if len(digest) == 0 {
		// Fallback: keep the first non-blank lines as-is.
		for _, raw := range allLines {
			line := strings.TrimSpace(raw)
			if line == "" {
				continue
			}
			digest = append(digest, line)
			if len(digest) >= limit {
				break
			}
		}
	}
	return truncateMemoryText(strings.Join(digest, "\n"), maxChars)
}
// isNumberedListLine reports whether the line looks like a markdown numbered
// list item: one or more digits, a dot, then a space ("1. like this").
func isNumberedListLine(line string) bool {
	head, tail, found := strings.Cut(line, ".")
	if !found || head == "" || !strings.HasPrefix(tail, " ") {
		return false
	}
	for _, c := range head {
		if c < '0' || c > '9' {
			return false
		}
	}
	return true
}
func atomicWriteFile(path string, data []byte, perm os.FileMode) error {
tmpPath := path + ".tmp"
if err := os.WriteFile(tmpPath, data, perm); err != nil {
return err
}
return os.Rename(tmpPath, path)
return fmt.Sprintf("# Memory\n\n%s", result)
}

File diff suppressed because it is too large Load Diff

View File

@@ -6,8 +6,6 @@ import (
"fmt"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"sync"
)
@@ -55,12 +53,6 @@ type searchResult struct {
score int
}
type fileSearchOutcome struct {
matches []searchResult
err error
file string
}
func (t *MemorySearchTool) Execute(ctx context.Context, args map[string]interface{}) (string, error) {
query, ok := args["query"].(string)
if !ok || query == "" {
@@ -68,14 +60,8 @@ func (t *MemorySearchTool) Execute(ctx context.Context, args map[string]interfac
}
maxResults := 5
if m, ok := parseIntArg(args["maxResults"]); ok {
maxResults = m
}
if maxResults < 1 {
maxResults = 1
}
if maxResults > 50 {
maxResults = 50
if m, ok := args["maxResults"].(float64); ok {
maxResults = int(m)
}
keywords := strings.Fields(strings.ToLower(query))
@@ -84,118 +70,48 @@ func (t *MemorySearchTool) Execute(ctx context.Context, args map[string]interfac
}
files := t.getMemoryFiles()
if len(files) == 0 {
return fmt.Sprintf("No memory files found for query: %s", query), nil
}
// Fast path: structured memory index.
if idx, err := t.loadOrBuildIndex(files); err == nil && idx != nil {
// If index has entries, use it. Otherwise fallback to file scan so parser/read warnings are visible.
if len(idx.Entries) > 0 {
results := t.searchInIndex(idx, keywords)
return t.renderSearchResults(query, results, maxResults), nil
}
}
resultsChan := make(chan fileSearchOutcome, len(files))
resultsChan := make(chan []searchResult, len(files))
var wg sync.WaitGroup
// Search all files concurrently
// 并发搜索所有文件
for _, file := range files {
wg.Add(1)
go func(f string) {
defer wg.Done()
matches, err := t.searchFile(f, keywords)
resultsChan <- fileSearchOutcome{matches: matches, err: err, file: f}
if err == nil {
resultsChan <- matches
}
}(file)
}
// Close channel asynchronously
// 异步关闭通道
go func() {
wg.Wait()
close(resultsChan)
}()
var allResults []searchResult
var failedFiles []string
for outcome := range resultsChan {
if outcome.err != nil {
relPath, _ := filepath.Rel(t.workspace, outcome.file)
if relPath == "" {
relPath = outcome.file
for matches := range resultsChan {
allResults = append(allResults, matches...)
}
// Simple ranking: sort by score (number of keyword matches) desc
for i := 0; i < len(allResults); i++ {
for j := i + 1; j < len(allResults); j++ {
if allResults[j].score > allResults[i].score {
allResults[i], allResults[j] = allResults[j], allResults[i]
}
failedFiles = append(failedFiles, relPath)
continue
}
allResults = append(allResults, outcome.matches...)
}
output := t.renderSearchResults(query, allResults, maxResults)
if len(failedFiles) > 0 {
suffix := formatSearchWarningSuffix(failedFiles)
if strings.HasPrefix(output, "No memory found for query:") {
return output + suffix, nil
}
return output + "\n" + suffix, nil
}
return output, nil
}
// searchInIndex ranks index entries against the given keywords using the
// inverted index: each keyword occurrence adds one point to its entry.
// Results are ordered by score (desc), then by line number (asc), and
// returned as searchResult values.
func (t *MemorySearchTool) searchInIndex(idx *memoryIndex, keywords []string) []searchResult {
	// Accumulate per-entry hit counts via the inverted index.
	hits := map[int]int{}
	for _, kw := range keywords {
		token := strings.ToLower(strings.TrimSpace(kw))
		for _, id := range idx.Inverted[token] {
			hits[id]++
		}
	}
	type ranked struct {
		entry memoryIndexEntry
		score int
	}
	candidates := make([]ranked, 0, len(hits))
	for id, score := range hits {
		// Guard against stale index IDs pointing outside Entries.
		if score <= 0 || id < 0 || id >= len(idx.Entries) {
			continue
		}
		candidates = append(candidates, ranked{entry: idx.Entries[id], score: score})
	}
	sort.Slice(candidates, func(i, j int) bool {
		if candidates[i].score != candidates[j].score {
			return candidates[i].score > candidates[j].score
		}
		return candidates[i].entry.LineNum < candidates[j].entry.LineNum
	})
	results := make([]searchResult, 0, len(candidates))
	for _, c := range candidates {
		results = append(results, searchResult{
			file:    c.entry.File,
			lineNum: c.entry.LineNum,
			content: c.entry.Content,
			score:   c.score,
		})
	}
	return results
}
func (t *MemorySearchTool) renderSearchResults(query string, allResults []searchResult, maxResults int) string {
sort.Slice(allResults, func(i, j int) bool {
if allResults[i].score == allResults[j].score {
return allResults[i].lineNum < allResults[j].lineNum
}
return allResults[i].score > allResults[j].score
})
if len(allResults) > maxResults {
allResults = allResults[:maxResults]
}
if len(allResults) == 0 {
return fmt.Sprintf("No memory found for query: %s", query)
return fmt.Sprintf("No memory found for query: %s", query), nil
}
var sb strings.Builder
@@ -204,46 +120,51 @@ func (t *MemorySearchTool) renderSearchResults(query string, allResults []search
relPath, _ := filepath.Rel(t.workspace, res.file)
sb.WriteString(fmt.Sprintf("--- Source: %s:%d ---\n%s\n\n", relPath, res.lineNum, res.content))
}
return sb.String()
return sb.String(), nil
}
func (t *MemorySearchTool) getMemoryFiles() []string {
var files []string
seen := map[string]struct{}{}
addIfExists := func(path string) {
if _, ok := seen[path]; ok {
return
}
if _, err := os.Stat(path); err == nil {
files = append(files, path)
seen[path] = struct{}{}
}
// Check workspace MEMORY.md first
mainMem := filepath.Join(t.workspace, "MEMORY.md")
if _, err := os.Stat(mainMem); err == nil {
files = append(files, mainMem)
}
// Prefer canonical long-term memory path.
canonical := filepath.Join(t.workspace, "memory", "MEMORY.md")
addIfExists(canonical)
// Legacy path fallback only when canonical file is absent.
if _, err := os.Stat(canonical); err != nil {
addIfExists(filepath.Join(t.workspace, "MEMORY.md"))
// Backward-compatible location: memory/MEMORY.md
legacyMem := filepath.Join(t.workspace, "memory", "MEMORY.md")
if _, err := os.Stat(legacyMem); err == nil {
files = append(files, legacyMem)
}
// Check memory/ directory recursively (e.g., memory/YYYYMM/YYYYMMDD.md).
// Recursively include memory/**/*.md
memDir := filepath.Join(t.workspace, "memory")
_ = filepath.Walk(memDir, func(path string, info os.FileInfo, err error) error {
if err != nil || info == nil || info.IsDir() {
_ = filepath.WalkDir(memDir, func(path string, d os.DirEntry, err error) error {
if err != nil || d == nil || d.IsDir() {
return nil
}
if strings.HasSuffix(strings.ToLower(info.Name()), ".md") {
if _, ok := seen[path]; !ok {
files = append(files, path)
seen[path] = struct{}{}
}
if strings.HasSuffix(strings.ToLower(d.Name()), ".md") {
files = append(files, path)
}
return nil
})
return files
return dedupeStrings(files)
}
// dedupeStrings returns the input with duplicates removed, preserving the
// order of first occurrence. The result is always a fresh slice.
func dedupeStrings(items []string) []string {
	unique := make([]string, 0, len(items))
	seenBefore := make(map[string]struct{}, len(items))
	for _, item := range items {
		if _, dup := seenBefore[item]; !dup {
			seenBefore[item] = struct{}{}
			unique = append(unique, item)
		}
	}
	return unique
}
// searchFile parses the markdown file into blocks (paragraphs/list items) and searches them
@@ -256,7 +177,6 @@ func (t *MemorySearchTool) searchFile(path string, keywords []string) ([]searchR
var results []searchResult
scanner := bufio.NewScanner(file)
scanner.Buffer(make([]byte, 64*1024), 1024*1024)
var currentBlock strings.Builder
var blockStartLine int = 1
@@ -289,12 +209,7 @@ func (t *MemorySearchTool) searchFile(path string, keywords []string) ([]searchR
}
}
// Keep all blocks when keywords are empty (index build).
if len(keywords) == 0 {
score = 1
}
// Only keep if at least one keyword matched.
// Only keep if at least one keyword matched
if score > 0 {
results = append(results, searchResult{
file: path,
@@ -316,7 +231,7 @@ func (t *MemorySearchTool) searchFile(path string, keywords []string) ([]searchR
// 1. Headers start new blocks
// 2. Empty lines separate blocks
// 3. List items start new blocks (optional, but good for logs)
isHeader := strings.HasPrefix(trimmed, "#")
isEmpty := trimmed == ""
isList := strings.HasPrefix(trimmed, "- ") || strings.HasPrefix(trimmed, "* ") || (len(trimmed) > 3 && trimmed[1] == '.' && trimmed[2] == ' ')
@@ -348,41 +263,5 @@ func (t *MemorySearchTool) searchFile(path string, keywords []string) ([]searchR
}
processBlock() // Flush last block
if err := scanner.Err(); err != nil {
return nil, err
}
return results, nil
}
// parseIntArg coerces a loosely-typed tool argument into an int. It accepts
// int, int64, float64 (truncated toward zero, as JSON numbers decode to
// float64), and numeric strings with surrounding whitespace. The second
// return value reports whether the conversion succeeded.
func parseIntArg(value interface{}) (int, bool) {
	switch v := value.(type) {
	case int:
		return v, true
	case int64:
		return int(v), true
	case float64:
		return int(v), true
	case string:
		if n, err := strconv.Atoi(strings.TrimSpace(v)); err == nil {
			return n, true
		}
	}
	return 0, false
}
// formatSearchWarningSuffix builds the warning line appended to memory_search
// output when some files could not be read or parsed. At most three file
// names are listed; extras are summarized with ", ...". Returns "" when no
// files failed.
func formatSearchWarningSuffix(failedFiles []string) string {
	total := len(failedFiles)
	if total == 0 {
		return ""
	}
	const previewLimit = 3
	preview := failedFiles
	if total > previewLimit {
		preview = failedFiles[:previewLimit]
	}
	warning := fmt.Sprintf(
		"Warning: memory_search skipped %d file(s) due to read/parse errors: %s",
		total, strings.Join(preview, ", "),
	)
	if total > previewLimit {
		warning += ", ..."
	}
	return warning
}
}