# core/attention/aha_detection.py
  1  """
  2  Aha Detection Module
  3  
  4  Two types of aha moments, each with distinct phenomenology:
  5  
  6  1. DISCOVERY (weak → strong)
  7     - Surprise, expansion, "I didn't know this mattered"
  8     - Graph expands - new territory opens
  9     - Biometric: HR spike, GSR spike, pupil dilation
 10     - Example: Realizing podcast episode connects to unrelated project
 11  
 12  2. ARCHITECTURAL (strong → strong)
 13     - "Resonant clunk" - settling, clicking into place
 14     - Graph contracts - disparate parts collapse together
 15     - Biometric: HRV increase (parasympathetic), slow breath
 16     - Example: Two systems you've been building finally unified
 17  
 18  The "resonant clunk" is the sound of graph distance shrinking.
 19  Space collapsing. Things that were far apart are now adjacent.
 20  
 21  Integration with Attention:
 22  - Attention tracking shows WHERE you're looking
 23  - Aha detection shows WHEN insight crystallizes
 24  - Together: both the gaze and the moment of recognition
 25  
 26  "Attention is all you need" - the aha moment IS attention
 27  crystallizing into insight.
 28  """
 29  
 30  from dataclasses import dataclass, field
 31  from datetime import datetime
 32  from typing import Optional, List, Dict, Callable, Any
 33  from enum import Enum
 34  import uuid
 35  
 36  
 37  class AhaType(Enum):
 38      """Two phenomenologically distinct types of insight."""
 39      DISCOVERY = "discovery"      # weak → strong, expansion, surprise
 40      ARCHITECTURAL = "architectural"  # strong → strong, contraction, settling
 41  
 42  
 43  class AhaSurfaceTarget(Enum):
 44      """Where aha moments can be surfaced."""
 45      INLINE_MARKDOWN = "inline"   # Inject into current content
 46      AHA_LOG = "aha_log"          # Append to dedicated aha log
 47      AUDIO_ANNOUNCE = "audio"     # Text-to-speech announcement
 48      NAV_MARKER = "nav"           # Add to navigation/breadcrumb
 49      DAILY_SUMMARY = "daily"      # Include in daily note synthesis
 50  
 51  
 52  @dataclass
 53  class BiometricSnapshot:
 54      """
 55      Biometric state at moment of potential aha.
 56  
 57      Used for calibration and validation - the body knows
 58      before the mind articulates.
 59  
 60      EEG fields (from Mind Monitor via EEG Bridge):
 61      - eeg_focus: Derived from alpha/theta ratio
 62      - eeg_flow: Derived from band pattern matching
 63      - eeg_engagement: Derived from beta+gamma elevation
 64      - eeg_bands: Raw band powers (delta, theta, alpha, beta, gamma)
 65      """
 66      timestamp: datetime
 67      heart_rate: Optional[float] = None
 68      heart_rate_baseline: Optional[float] = None  # Rolling average
 69      gsr: Optional[float] = None  # Galvanic skin response
 70      gsr_baseline: Optional[float] = None
 71      hrv: Optional[float] = None  # Heart rate variability
 72      hrv_baseline: Optional[float] = None
 73      pupil_diameter: Optional[float] = None
 74      breath_rate: Optional[float] = None
 75  
 76      # EEG-derived metrics (from Mind Monitor via EEG Bridge)
 77      eeg_focus: Optional[float] = None       # 0-1, from alpha/theta ratio
 78      eeg_flow: Optional[float] = None        # 0-1, flow state indicator
 79      eeg_engagement: Optional[float] = None  # 0-1, arousal/engagement
 80      eeg_bands: Optional[Dict[str, float]] = None  # Raw: delta, theta, alpha, beta, gamma
 81  
 82      @property
 83      def hr_spike(self) -> Optional[float]:
 84          """Heart rate spike relative to baseline."""
 85          if self.heart_rate and self.heart_rate_baseline:
 86              return (self.heart_rate - self.heart_rate_baseline) / self.heart_rate_baseline
 87          return None
 88  
 89      @property
 90      def gsr_spike(self) -> Optional[float]:
 91          """GSR spike relative to baseline."""
 92          if self.gsr and self.gsr_baseline:
 93              return (self.gsr - self.gsr_baseline) / self.gsr_baseline
 94          return None
 95  
 96      @property
 97      def hrv_change(self) -> Optional[float]:
 98          """HRV change relative to baseline."""
 99          if self.hrv and self.hrv_baseline:
100              return (self.hrv - self.hrv_baseline) / self.hrv_baseline
101          return None
102  
103      @property
104      def eeg_focus_high(self) -> bool:
105          """Is EEG focus elevated? (> 0.7)"""
106          return self.eeg_focus is not None and self.eeg_focus > 0.7
107  
108      @property
109      def eeg_flow_high(self) -> bool:
110          """Is operator in flow state? (> 0.6)"""
111          return self.eeg_flow is not None and self.eeg_flow > 0.6
112  
113      @property
114      def eeg_engagement_high(self) -> bool:
115          """Is engagement/arousal elevated? (> 0.7)"""
116          return self.eeg_engagement is not None and self.eeg_engagement > 0.7
117  
118      @property
119      def has_eeg_data(self) -> bool:
120          """Check if EEG data is available."""
121          return any([
122              self.eeg_focus is not None,
123              self.eeg_flow is not None,
124              self.eeg_engagement is not None,
125          ])
126  
127  
@dataclass
class AhaCandidate:
    """
    A potential aha moment detected by the system.

    May be validated by biometrics, user confirmation,
    or subsequent behavior.
    """
    id: str = field(default_factory=lambda: str(uuid.uuid4())[:8])
    timestamp: datetime = field(default_factory=datetime.now)
    aha_type: AhaType = AhaType.DISCOVERY

    # What triggered the detection
    trigger_source: str = ""  # 'edge_prediction', 'resonance_spike', 'cross_session', etc.
    trigger_content: str = ""  # The specific content that triggered

    # Graph context
    source_concepts: List[str] = field(default_factory=list)  # What connected
    target_concepts: List[str] = field(default_factory=list)  # To what
    graph_distance_before: Optional[float] = None  # How far apart they were
    graph_distance_after: Optional[float] = None   # How far apart now

    # Biometric validation (if available)
    biometrics: Optional[BiometricSnapshot] = None
    biometric_validated: bool = False

    # Confidence and validation
    confidence: float = 0.5
    user_validated: Optional[bool] = None  # True/False after user feedback

    # Content
    summary: str = ""  # One-line summary of the insight
    context: str = ""  # Fuller context

    @property
    def graph_contraction(self) -> Optional[float]:
        """How much the graph contracted (for architectural ahas).

        Uses explicit None checks: a distance of 0.0 (fully collapsed)
        is valid data and must not be treated as missing.
        """
        if self.graph_distance_before is not None and self.graph_distance_after is not None:
            return self.graph_distance_before - self.graph_distance_after
        return None

    def validate_with_biometrics(
        self,
        hr_threshold: float = 0.15,
        gsr_threshold: float = 0.20,
        hrv_threshold: float = 0.10,
        eeg_focus_threshold: float = 0.7,
        eeg_engagement_threshold: float = 0.7
    ) -> bool:
        """
        Validate aha candidate using biometric signals.

        Discovery ahas: HR spike + GSR spike (arousal, surprise)
                       OR EEG: High engagement + focus spike
        Architectural ahas: HRV increase (parasympathetic, settling)
                           OR EEG: High flow state

        Sets ``biometric_validated`` and, on success, boosts
        ``confidence`` by 0.3 (capped at 1.0). Returns the validation
        result; False when no biometrics are attached.
        """
        if not self.biometrics:
            return False

        # Track what validated the aha
        hr_valid = False
        gsr_valid = False
        hrv_valid = False
        eeg_valid = False

        if self.aha_type == AhaType.DISCOVERY:
            # Discovery: arousal pattern
            # Traditional: HR spike + GSR spike
            hr_valid = (self.biometrics.hr_spike or 0) >= hr_threshold
            gsr_valid = (self.biometrics.gsr_spike or 0) >= gsr_threshold

            # EEG alternative: high engagement + focus.
            # Fixed: compare against the caller-supplied thresholds --
            # previously these parameters were accepted but silently
            # ignored in favor of the hard-coded property cutoffs.
            if self.biometrics.has_eeg_data:
                eeg_valid = (
                    self.biometrics.eeg_engagement is not None and
                    self.biometrics.eeg_engagement > eeg_engagement_threshold and
                    self.biometrics.eeg_focus is not None and
                    self.biometrics.eeg_focus > eeg_focus_threshold
                )

            self.biometric_validated = (hr_valid and gsr_valid) or eeg_valid

        elif self.aha_type == AhaType.ARCHITECTURAL:
            # Architectural: settling pattern
            # Traditional: HRV increase
            hrv_valid = (self.biometrics.hrv_change or 0) >= hrv_threshold

            # EEG alternative: high flow state (alpha dominant, calm focus)
            if self.biometrics.has_eeg_data:
                eeg_valid = self.biometrics.eeg_flow_high

            self.biometric_validated = hrv_valid or eeg_valid

        # Boost confidence based on biometric validation
        if self.biometric_validated:
            self.confidence = min(1.0, self.confidence + 0.3)

        return self.biometric_validated
225  
226  
@dataclass
class AhaSurfacingConfig:
    """Controls which channels receive ahas and the gating criteria.

    By default only the daily summary is targeted.
    """
    targets: List[AhaSurfaceTarget] = field(
        default_factory=lambda: [AhaSurfaceTarget.DAILY_SUMMARY]
    )
    min_confidence: float = 0.6            # ahas below this are not surfaced
    require_biometric: bool = False        # demand bodily confirmation first
    audio_enabled: bool = False            # gate for the TTS channel
    daily_note_path: Optional[str] = None  # destination for daily synthesis
237  
238  
class AhaDetector:
    """
    Turns attention and graph signals into aha-moment candidates.

    Collaborators:
    - AttentionTracker: attention events feed detection
    - EdgePredictionEngine: predicted edges trigger candidates
    - BiometricStream: validates candidates with body signals
    - CrossSessionTracker: cross-pollination triggers candidates
    """

    # Biometric validation cutoffs, expressed relative to baseline
    HR_SPIKE_THRESHOLD = 0.15   # 15% above baseline
    GSR_SPIKE_THRESHOLD = 0.20  # 20% above baseline
    HRV_INCREASE_THRESHOLD = 0.10  # 10% increase

    def __init__(self):
        self.candidates: List[AhaCandidate] = []
        self.validated_ahas: List[AhaCandidate] = []
        self._callbacks: List[Callable[[AhaCandidate], None]] = []
        self._biometric_stream: Optional[Any] = None  # BiometricStream when available

    def on_aha(self, callback: Callable[[AhaCandidate], None]) -> None:
        """Register a callback invoked for each validated aha moment."""
        self._callbacks.append(callback)

    def detect_from_edge_prediction(
        self,
        source_concepts: List[str],
        target_concepts: List[str],
        prediction_confidence: float,
        graph_distance: float
    ) -> Optional[AhaCandidate]:
        """
        Raise a DISCOVERY candidate when the edge prediction engine
        confidently links concepts that sit far apart in the graph.

        Returns the new candidate, or None when the prediction is too
        weak or the concepts too close to count as a discovery.
        """
        # Only distant (> 3.0) AND confident (> 0.7) predictions qualify.
        if not (graph_distance > 3.0 and prediction_confidence > 0.7):
            return None

        cand = AhaCandidate(
            aha_type=AhaType.DISCOVERY,
            trigger_source='edge_prediction',
            source_concepts=source_concepts,
            target_concepts=target_concepts,
            graph_distance_before=graph_distance,
            confidence=prediction_confidence * 0.8,
            summary=f"Connection predicted: {source_concepts} → {target_concepts}"
        )
        self.candidates.append(cand)
        return cand

    def detect_from_resonance_spike(
        self,
        concept: str,
        resonance_before: float,
        resonance_after: float,
        related_concepts: List[str]
    ) -> Optional[AhaCandidate]:
        """
        Raise a candidate when resonance jumps sharply (delta > 0.3).

        Weak-to-strong jumps (starting below 0.4) read as DISCOVERY;
        strong-to-stronger jumps read as ARCHITECTURAL.
        """
        delta = resonance_after - resonance_before
        if not delta > 0.3:
            return None

        # Classify by where the spike started, not where it landed.
        kind = AhaType.DISCOVERY if resonance_before < 0.4 else AhaType.ARCHITECTURAL

        cand = AhaCandidate(
            aha_type=kind,
            trigger_source='resonance_spike',
            trigger_content=concept,
            source_concepts=[concept],
            target_concepts=related_concepts,
            confidence=min(1.0, 0.5 + delta),
            summary=f"Resonance spike on '{concept}': {resonance_before:.2f} → {resonance_after:.2f}"
        )
        self.candidates.append(cand)
        return cand

    def detect_from_cross_session(
        self,
        topic: str,
        session_ids: List[str],
        combined_intensity: float
    ) -> Optional[AhaCandidate]:
        """
        Raise an ARCHITECTURAL candidate when two or more sessions
        converge on the same topic with enough combined intensity --
        distributed cognition crystallizing.
        """
        if not (len(session_ids) >= 2 and combined_intensity > 0.6):
            return None

        cand = AhaCandidate(
            aha_type=AhaType.ARCHITECTURAL,
            trigger_source='cross_session',
            trigger_content=topic,
            source_concepts=[f"session_{s}" for s in session_ids],
            target_concepts=[topic],
            confidence=combined_intensity * 0.9,
            summary=f"Cross-session convergence on '{topic}' across {len(session_ids)} sessions"
        )
        self.candidates.append(cand)
        return cand

    def attach_biometrics(
        self,
        candidate_id: str,
        biometrics: BiometricSnapshot
    ) -> bool:
        """
        Attach a biometric snapshot to the matching candidate and run
        validation using the class-level thresholds.

        Returns True only when the biometrics confirm the aha; False
        when they do not, or when no candidate has that id.
        """
        match = next((c for c in self.candidates if c.id == candidate_id), None)
        if match is None:
            return False

        match.biometrics = biometrics
        confirmed = match.validate_with_biometrics(
            hr_threshold=self.HR_SPIKE_THRESHOLD,
            gsr_threshold=self.GSR_SPIKE_THRESHOLD,
            hrv_threshold=self.HRV_INCREASE_THRESHOLD
        )
        if confirmed:
            self._promote_to_validated(match)
        return confirmed

    def user_validate(self, candidate_id: str, is_valid: bool) -> None:
        """
        Record explicit user feedback on a candidate -- the ground
        truth that calibrates future detection.
        """
        match = next((c for c in self.candidates if c.id == candidate_id), None)
        if match is None:
            return

        match.user_validated = is_valid
        if is_valid:
            # Confirmed insights get a confidence floor and promotion.
            match.confidence = max(match.confidence, 0.9)
            self._promote_to_validated(match)
        else:
            # Rejected candidates are confidence-capped, not removed.
            match.confidence = min(match.confidence, 0.3)

    def _promote_to_validated(self, candidate: AhaCandidate) -> None:
        """Move a candidate into the validated list and fire callbacks (idempotent)."""
        if candidate in self.validated_ahas:
            return
        self.validated_ahas.append(candidate)
        for notify in self._callbacks:
            notify(candidate)

    def get_recent_candidates(
        self,
        since: Optional[datetime] = None,
        min_confidence: float = 0.0
    ) -> List[AhaCandidate]:
        """Return candidates newest-first, optionally filtered by time and confidence."""
        selected = [
            c for c in self.candidates
            if (since is None or c.timestamp >= since)
            and (min_confidence <= 0 or c.confidence >= min_confidence)
        ]
        selected.sort(key=lambda c: c.timestamp, reverse=True)
        return selected

    def get_validated_ahas(
        self,
        since: Optional[datetime] = None
    ) -> List[AhaCandidate]:
        """Return validated aha moments newest-first, optionally filtered by time."""
        selected = [a for a in self.validated_ahas if since is None or a.timestamp >= since]
        selected.sort(key=lambda a: a.timestamp, reverse=True)
        return selected
437  
438  
class AhaManager:
    """
    Routes validated aha moments to their configured output channels.

    Channels:
    - Inline markdown: inject into current content stream
    - Aha log: append to dedicated log file
    - Audio announce: text-to-speech for ambient awareness
    - Nav marker: add to navigation/breadcrumb trail
    - Daily summary: include in daily note synthesis
    """

    def __init__(self, config: AhaSurfacingConfig):
        self.config = config
        self.detector = AhaDetector()
        self._surfaced: List[str] = []  # ids already surfaced (dedupe guard)

        # Every aha the detector validates flows straight into surfacing.
        self.detector.on_aha(self._on_validated_aha)

    def _on_validated_aha(self, aha: AhaCandidate) -> None:
        """Gate a freshly validated aha through config before surfacing."""
        if not (aha.confidence >= self.config.min_confidence):
            return
        if self.config.require_biometric and not aha.biometric_validated:
            return
        self.surface(aha)

    def surface(self, aha: AhaCandidate) -> Dict[str, bool]:
        """
        Push one aha to every configured target.

        Returns a mapping of target value → success flag; an aha that
        was already surfaced yields an empty mapping.
        """
        if aha.id in self._surfaced:
            return {}

        # Dispatch table in place of a target-by-target if/elif chain.
        handlers = {
            AhaSurfaceTarget.INLINE_MARKDOWN: self._surface_inline,
            AhaSurfaceTarget.AHA_LOG: self._surface_to_log,
            AhaSurfaceTarget.AUDIO_ANNOUNCE: self._surface_audio,
            AhaSurfaceTarget.NAV_MARKER: self._surface_nav,
            AhaSurfaceTarget.DAILY_SUMMARY: self._surface_daily,
        }

        outcome: Dict[str, bool] = {}
        for target in self.config.targets:
            handler = handlers.get(target)
            if handler is None:
                continue
            try:
                outcome[target.value] = handler(aha)
            except Exception as e:
                print(f"[AhaManager] Failed to surface to {target}: {e}")
                outcome[target.value] = False

        self._surfaced.append(aha.id)
        return outcome

    def _surface_inline(self, aha: AhaCandidate) -> bool:
        """Emit the aha as inline markdown (stub: prints to stdout)."""
        if aha.aha_type == AhaType.DISCOVERY:
            emoji = "💡"
        else:
            emoji = "🔧"
        print(f"\n{emoji} **Aha [{aha.aha_type.value}]**: {aha.summary}\n")
        return True

    def _surface_to_log(self, aha: AhaCandidate) -> bool:
        """Append the aha to the dedicated log (stub: prints to stdout)."""
        log_entry = f"[{aha.timestamp.isoformat()}] [{aha.aha_type.value}] {aha.summary}"
        print(f"[AhaLog] {log_entry}")
        return True

    def _surface_audio(self, aha: AhaCandidate) -> bool:
        """Announce the aha via TTS; no-op unless audio is enabled."""
        if not self.config.audio_enabled:
            return False

        print(f"[Audio] Would announce: {aha.summary}")
        return True

    def _surface_nav(self, aha: AhaCandidate) -> bool:
        """Drop a navigation marker for the aha (stub: prints to stdout)."""
        print(f"[Nav] Marker added: {aha.summary[:30]}...")
        return True

    def _surface_daily(self, aha: AhaCandidate) -> bool:
        """Queue the aha for daily note synthesis; no-op without a note path."""
        if not self.config.daily_note_path:
            return False

        print(f"[Daily] Would add to daily note:\n{self._format_for_daily(aha)}")
        return True

    def _format_for_daily(self, aha: AhaCandidate) -> str:
        """Render the aha as a markdown bullet block for the daily note."""
        if aha.aha_type == AhaType.DISCOVERY:
            emoji = "💡"
        else:
            emoji = "🔧"
        confidence_str = f"[{aha.confidence:.0%}]"

        parts = [f"- {emoji} {confidence_str} **{aha.aha_type.value.title()}**: {aha.summary}"]
        if aha.source_concepts and aha.target_concepts:
            parts.append(f"  - Connected: {' + '.join(aha.source_concepts[:3])} → {' + '.join(aha.target_concepts[:3])}")
        if aha.biometric_validated:
            parts.append("  - ✓ Biometrically validated")
        return "\n".join(parts)
553  
554  
def create_aha_system(
    daily_note_path: Optional[str] = None,
    audio_enabled: bool = False
) -> tuple:
    """
    Build the aha detection and surfacing stack with default gating
    (daily summary + aha log, min confidence 0.6, no biometric gate).

    Returns (AhaDetector, AhaManager).
    """
    surfacing = AhaSurfacingConfig(
        targets=[
            AhaSurfaceTarget.DAILY_SUMMARY,
            AhaSurfaceTarget.AHA_LOG,
        ],
        min_confidence=0.6,
        require_biometric=False,
        audio_enabled=audio_enabled,
        daily_note_path=daily_note_path
    )

    mgr = AhaManager(surfacing)
    return mgr.detector, mgr
577  
578  
if __name__ == "__main__":
    # Smoke-test the detection pipeline end to end with fake signals.
    print("=== Aha Detection Module ===\n")

    det, mgr = create_aha_system()

    # Far-apart concepts plus a confident prediction → DISCOVERY candidate.
    print("1. Simulating edge prediction detection...")
    edge_cand = det.detect_from_edge_prediction(
        source_concepts=["attention_tracking"],
        target_concepts=["aha_detection"],
        prediction_confidence=0.85,
        graph_distance=4.2
    )
    if edge_cand:
        print(f"   Candidate: {edge_cand.summary}")
        print(f"   Type: {edge_cand.aha_type.value}")
        print(f"   Confidence: {edge_cand.confidence:.2f}")

    # Already-strong resonance jumping higher → ARCHITECTURAL candidate.
    print("\n2. Simulating resonance spike detection...")
    spike_cand = det.detect_from_resonance_spike(
        concept="markov_blankets",
        resonance_before=0.7,
        resonance_after=0.95,
        related_concepts=["attention", "membrane", "boundary"]
    )
    if spike_cand:
        print(f"   Candidate: {spike_cand.summary}")
        print(f"   Type: {spike_cand.aha_type.value}")

    # Three sessions converging on one topic → ARCHITECTURAL candidate.
    print("\n3. Simulating cross-session convergence...")
    converge_cand = det.detect_from_cross_session(
        topic="attention_is_all_you_need",
        session_ids=["abc123", "def456", "ghi789"],
        combined_intensity=0.82
    )
    if converge_cand:
        print(f"   Candidate: {converge_cand.summary}")
        print(f"   Type: {converge_cand.aha_type.value}")

    # Arousal pattern (HR + GSR spikes) should validate the discovery.
    print("\n4. Simulating biometric validation...")
    body_state = BiometricSnapshot(
        timestamp=datetime.now(),
        heart_rate=85,
        heart_rate_baseline=72,
        gsr=4.2,
        gsr_baseline=3.5,
        hrv=65,
        hrv_baseline=55
    )
    print(f"   HR spike: {body_state.hr_spike:.1%}")
    print(f"   GSR spike: {body_state.gsr_spike:.1%}")
    print(f"   HRV change: {body_state.hrv_change:.1%}")

    if edge_cand:
        confirmed = det.attach_biometrics(edge_cand.id, body_state)
        print(f"   Biometric validation: {confirmed}")

    print("\n5. Validated ahas:")
    for aha in det.get_validated_ahas():
        print(f"   - [{aha.aha_type.value}] {aha.summary}")

    print("\n'Attention is all you need' - the aha IS attention crystallizing")