// internal/tui/compact.go
  1  package tui
  2  
import (
	"context"
	"errors"
	"fmt"
	"path/filepath"
	"strings"
	"time"

	"github.com/charmbracelet/lipgloss"

	"github.com/Kocoro-lab/ShanClaw/internal/agent"
	"github.com/Kocoro-lab/ShanClaw/internal/client"
	ctxwin "github.com/Kocoro-lab/ShanClaw/internal/context"
	"github.com/Kocoro-lab/ShanClaw/internal/session"
)
 16  
// compactDoneMsg reports the outcome of a compaction run back to the
// TUI update loop. On failure only err is set; on success err is nil.
type compactDoneMsg struct {
	beforeTokens int    // estimated token count of the history before compaction
	afterTokens  int    // estimated token count after shaping
	summary      string // generated summary, truncated for display (~200 runes)
	err          error  // non-nil if compaction failed; other fields are then zero
}
 23  
 24  // runCompact performs context compaction: persist learnings → summarize → shape history.
 25  func (m *Model) runCompact(customInstructions string) func() compactDoneMsg {
 26  	return func() compactDoneMsg {
 27  		sess := m.sessions.Current()
 28  		if sess == nil {
 29  			return compactDoneMsg{err: fmt.Errorf("no active session")}
 30  		}
 31  		messages := sess.Messages
 32  		if len(messages) < ctxwin.MinShapeable() {
 33  			return compactDoneMsg{err: fmt.Errorf("conversation too short to compact (need %d+ messages, have %d)", ctxwin.MinShapeable(), len(messages))}
 34  		}
 35  
 36  		beforeTokens := ctxwin.EstimateTokens(messages)
 37  
 38  		ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
 39  		defer cancel()
 40  		var usage agent.UsageAccumulator
 41  
 42  		// Step 1: persist learnings to MEMORY.md
 43  		memoryDir := m.shannonDir + "/memory"
 44  		if m.agentOverride != nil {
 45  			memoryDir = fmt.Sprintf("%s/agents/%s", m.shannonDir, m.agentOverride.Name)
 46  		}
 47  		plUsage, _ := ctxwin.PersistLearnings(ctx, m.gateway, messages, memoryDir)
 48  		if plUsage.InputTokens > 0 || plUsage.OutputTokens > 0 {
 49  			usage.Add(agent.LLMUsageDelta(plUsage, ""))
 50  		}
 51  
 52  		// Step 2: generate summary
 53  		msgsForSummary := messages
 54  		if customInstructions != "" {
 55  			hint := client.Message{
 56  				Role:    "user",
 57  				Content: client.NewTextContent("Summarization focus: " + customInstructions),
 58  			}
 59  			msgsForSummary = append([]client.Message{hint}, messages...)
 60  		}
 61  		summary, sumUsage, err := ctxwin.GenerateSummary(ctx, m.gateway, msgsForSummary)
 62  		if sumUsage.InputTokens > 0 || sumUsage.OutputTokens > 0 {
 63  			usage.Add(agent.LLMUsageDelta(sumUsage, ""))
 64  		}
 65  		if err != nil {
 66  			return compactDoneMsg{err: fmt.Errorf("summarization failed: %w", err)}
 67  		}
 68  
 69  		// Step 3: shape history.
 70  		// ShapeHistory expects [system] + [first user] + ... but TUI sessions
 71  		// don't persist the system prompt. Prepend a placeholder so the array
 72  		// layout matches, then strip it from the result.
 73  		ctxWindow := m.cfg.Agent.ContextWindow
 74  		if ctxWindow <= 0 {
 75  			ctxWindow = 128000
 76  		}
 77  		withSystem := make([]client.Message, 0, 1+len(messages))
 78  		withSystem = append(withSystem, client.Message{Role: "system", Content: client.NewTextContent("(compaction placeholder)")})
 79  		withSystem = append(withSystem, messages...)
 80  		shaped := ctxwin.ShapeHistory(withSystem, summary, ctxWindow)
 81  
 82  		// Strip the placeholder system message from shaped result
 83  		if len(shaped) > 0 && shaped[0].Role == "system" {
 84  			shaped = shaped[1:]
 85  		}
 86  
 87  		// Rebuild MessageMeta to stay index-aligned with the new Messages.
 88  		newMeta := make([]session.MessageMeta, len(shaped))
 89  		for i := range newMeta {
 90  			newMeta[i] = session.MessageMeta{Source: "local", Timestamp: session.TimePtr(time.Now())}
 91  		}
 92  
 93  		// Update session
 94  		sess.Messages = shaped
 95  		sess.MessageMeta = newMeta
 96  		acc := usage.Snapshot()
 97  		if llm := acc.LLM; llm.LLMCalls > 0 || llm.TotalTokens > 0 || llm.CostUSD > 0 {
 98  			m.sessions.AddUsage(sess.ID, session.UsageFromAccumulated(
 99  				llm.LLMCalls, llm.InputTokens, llm.OutputTokens, llm.TotalTokens,
100  				llm.CostUSD, llm.CacheReadTokens, llm.CacheCreationTokens, llm.CacheCreation5mTokens, llm.CacheCreation1hTokens, llm.Model,
101  				acc.ToolCalls, acc.ToolCostUSD,
102  			))
103  		}
104  		m.sessions.Save()
105  
106  		afterTokens := ctxwin.EstimateTokens(shaped)
107  
108  		// Truncate summary for display
109  		displaySummary := summary
110  		if r := []rune(displaySummary); len(r) > 200 {
111  			displaySummary = string(r[:200]) + "..."
112  		}
113  
114  		return compactDoneMsg{
115  			beforeTokens: beforeTokens,
116  			afterTokens:  afterTokens,
117  			summary:      displaySummary,
118  		}
119  	}
120  }
121  
122  func formatCompactResult(msg compactDoneMsg) string {
123  	dimStyle := lipgloss.NewStyle().Foreground(lipgloss.Color("243"))
124  	var sb strings.Builder
125  	sb.WriteString(dimStyle.Render(fmt.Sprintf("  Context compressed: ~%s → ~%s tokens",
126  		formatTokenCount(msg.beforeTokens), formatTokenCount(msg.afterTokens))))
127  	sb.WriteString("\n")
128  	if msg.summary != "" {
129  		sb.WriteString(dimStyle.Render("  Summary: " + msg.summary))
130  	}
131  	return sb.String()
132  }