# scripts/resonance_engine.py
  1  #!/usr/bin/env python3
  2  """
  3  Resonance Engine - Connection discovery with human confirmation
  4  
  5  This script implements the philosophical concepts discovered in Sovereign_OS:
  6  - Rhizome (Deleuze): Any point can connect to any other point
  7  - Spontaneous Order (Hayek): Connections emerge from content, not design
  8  - A4 Ergodicity: Prevent orphan ruin before it happens
  9  - Resonance-driven-linking: Automation SUGGESTS; the human CONFIRMS; the link EMERGES
 10  
 11  Usage:
 12      python scripts/resonance_engine.py <target_file> [--threshold=0.3]
 13      python scripts/resonance_engine.py <target_file> --apply
 14      python scripts/resonance_engine.py --fix-orphans [--apply]
 15  
 16  DEFAULT MODE (suggest-only):
 17      - Analyzes content for key terms and concepts
 18      - Finds pages with overlapping semantic content
 19      - SUGGESTS wiki-links for human review
 20      - Creates bidirectional suggestions (rhizomatic, not hierarchical)
 21  
 22  WITH --apply:
 23      - Actually inserts the wiki-links into Related sections
 24      - Use after reviewing suggestions
 25  """
 26  
 27  import os
 28  import re
 29  import sys
 30  from collections import Counter, defaultdict
 31  from pathlib import Path
 32  from typing import Dict, List, Set, Tuple, Optional
 33  import argparse
 34  
# Directories to scan (repo-relative roots searched for *.md pages).
SCAN_DIRS = ['docs', 'patterns', 'sessions', 'dashboards']
# Directory names excluded anywhere in a file's path.
SKIP_DIRS = {'.git', 'node_modules', '__pycache__', 'templates'}
# Filenames excluded from scanning regardless of location.
SKIP_FILES = {'README.md', 'CHANGELOG.md'}

# Stop words - common words that don't indicate semantic connection.
# Filtered out by extract_terms (along with words shorter than 3 chars).
STOP_WORDS = {
    'the', 'a', 'an', 'and', 'or', 'but', 'in', 'on', 'at', 'to', 'for',
    'of', 'with', 'by', 'from', 'as', 'is', 'was', 'are', 'were', 'been',
    'be', 'have', 'has', 'had', 'do', 'does', 'did', 'will', 'would',
    'could', 'should', 'may', 'might', 'must', 'shall', 'can', 'need',
    'this', 'that', 'these', 'those', 'it', 'its', 'they', 'them',
    'their', 'we', 'us', 'our', 'you', 'your', 'he', 'she', 'him', 'her',
    'not', 'no', 'yes', 'all', 'any', 'each', 'every', 'both', 'few',
    'more', 'most', 'other', 'some', 'such', 'only', 'same', 'so', 'than',
    'too', 'very', 'just', 'also', 'now', 'here', 'there', 'when', 'where',
    'why', 'how', 'what', 'which', 'who', 'whom', 'if', 'then', 'else',
    'because', 'while', 'although', 'though', 'unless', 'until', 'after',
    'before', 'since', 'during', 'about', 'into', 'through', 'between',
    'under', 'over', 'above', 'below', 'up', 'down', 'out', 'off', 'away',
    'back', 'again', 'still', 'already', 'always', 'never', 'ever', 'often',
    'usually', 'sometimes', 'related', 'see', 'like', 'using', 'used',
    'shape', 'principle', 'axiom', 'pattern', 'protocol', 'section',
}

# High-value terms - concepts that indicate strong semantic connection.
# extract_terms weights these 3x, and calculate_resonance boosts the score
# when two pages share any of them.
HIGH_VALUE_TERMS = {
    # Axioms
    'boundary', 'markov', 'blanket', 'integration', 'telos', 'recognition',
    'life', 'death', 'navigation', 'pole', 'ergodic', 'ruin', 'survival',
    # Philosophers
    'deleuze', 'hayek', 'taleb', 'friston', 'dellanna', 'guattari',
    # Concepts
    'rhizome', 'deterritorialization', 'spontaneous', 'order', 'emergence',
    'entropy', 'free-energy', 'active-inference', 'autopoiesis',
    'feedback', 'loop', 'adaptation', 'signal', 'compression',
    # Technical
    'graph', 'wiki-link', 'orphan', 'connection', 'resonance',
}

# =============================================================================
# AXIOM SEMANTIC FIELDS
# Imported from principle_steward.py insights
# =============================================================================

# Each axiom carries three tiers of indicator terms. Tier membership is
# weighted by calculate_axiom_resonance (strong 0.15, medium 0.08,
# weak 0.03 per distinct matching term, capped at 1.0).
AXIOM_FIELDS = {
    "A0": {
        "name": "Boundary Operation",
        "strong": {'boundary', 'blanket', 'markov', 'distinction', 'observer',
                   'sovereign', 'sovereignty', 'sensory', 'active', 'internal', 'external'},
        "medium": {'permeable', 'permeability', 'flow', 'flows', 'structure', 'content',
                   'scale', 'fractal', 'nested', 'inside', 'outside', 'crosses'},
        "weak": {'separate', 'division', 'filter', 'membrane'},
    },
    "A1": {
        "name": "Telos of Integration",
        "strong": {'integration', 'integrate', 'integrating', 'connection', 'connect',
                   'binding', 'isolation', 'isolated', 'isolating', 'relation'},
        "medium": {'tribe', 'tribal', 'collective', 'unify', 'unity', 'unified',
                   'merge', 'merging', 'together', 'shared'},
        "weak": {'join', 'joining', 'link', 'linking', 'collaborate'},
    },
    "A2": {
        "name": "Recognition of Life",
        "strong": {'life', 'alive', 'living', 'death', 'dead', 'dying', 'primitive',
                   'calcified', 'sclerosis', 'ornament', 'cain', 'blindness'},
        "medium": {'recognize', 'recognition', 'beauty', 'beautiful', 'resonance',
                   'simple', 'complex', 'accumulated', 'cruft', 'golden', 'carpenter'},
        "weak": {'fresh', 'stale', 'responsive', 'rigid', 'clear', 'opaque'},
    },
    "A3": {
        "name": "Dynamic Pole Navigation",
        "strong": {'pole', 'poles', 'dyad', 'tension', 'navigate', 'navigation',
                   'oscillate', 'oscillation', 'movement', 'shadow', 'dynamic', 'static'},
        "medium": {'extreme', 'extremes', 'balance', 'between', 'spectrum',
                   'context', 'contextual', 'invert', 'inversion'},
        "weak": {'middle', 'midpoint', 'swing', 'shift', 'pendulum'},
    },
    "A4": {
        "name": "Ergodic Asymmetry",
        "strong": {'ruin', 'ruinous', 'catastrophe', 'catastrophic', 'terminal',
                   'irreversible', 'unrecoverable', 'survival', 'survive', 'ergodic',
                   'asymmetry', 'asymmetric', 'compound'},
        "medium": {'risk', 'risky', 'dangerous', 'cheap', 'expensive', 'cost',
                   'rebuild', 'rewrite', 'redo', 'reversible', 'undo', 'rollback'},
        "weak": {'careful', 'caution', 'fail', 'failure', 'recover', 'backup'},
    },
}
123  
124  
def calculate_axiom_resonance(terms: Counter, axiom_id: Optional[str] = None) -> Dict[str, float]:
    """
    Calculate resonance with each axiom's semantic field.

    Args:
        terms: Weighted term counts for a page (see extract_terms); only
            the distinct words matter here, not their counts.
        axiom_id: If given, score only that axiom; otherwise score every
            axiom in AXIOM_FIELDS.

    Returns:
        Dict of axiom_id -> resonance score in [0, 1]. Unknown axiom ids
        are skipped, so the result may be empty.
    """
    word_set = set(terms)
    results: Dict[str, float] = {}

    axioms_to_check = [axiom_id] if axiom_id else AXIOM_FIELDS.keys()

    for aid in axioms_to_check:
        # Single lookup instead of membership test + subscript.
        field = AXIOM_FIELDS.get(aid)
        if field is None:
            continue

        # Distinct-term matches per tier drive the score.
        strong_count = len(word_set & field["strong"])
        medium_count = len(word_set & field["medium"])
        weak_count = len(word_set & field["weak"])

        # Weighted score: ~7 strong hits saturate the 1.0 cap.
        score = (
            strong_count * 0.15 +
            medium_count * 0.08 +
            weak_count * 0.03
        )

        results[aid] = min(1.0, score)

    return results
157  
158  
def get_dominant_axiom(terms: Counter) -> Optional[Tuple[str, float]]:
    """
    Identify the single axiom this page resonates with most strongly.

    Returns (axiom_id, score), or None when no axiom reaches the
    0.15 significance floor.
    """
    all_scores = calculate_axiom_resonance(terms)
    if not all_scores:
        return None

    winner_id, winner_score = max(all_scores.items(), key=lambda item: item[1])
    # Below the floor, "dominant" would just be noise.
    return (winner_id, winner_score) if winner_score >= 0.15 else None
174  
175  
def normalize_link(link: str) -> str:
    """Normalize a wiki-link to comparable format."""
    # Keep only the final path component, lowercased.
    tail = link.rsplit('/', 1)[-1].lower()
    # Strip a trailing markdown extension, then hyphenate spaces.
    if tail.endswith('.md'):
        tail = tail[:-len('.md')]
    return tail.replace(' ', '-')
182  
183  
def file_to_link_name(filepath: Path) -> str:
    """Convert filepath to wiki-link name."""
    stem = filepath.stem
    return normalize_link(stem)
187  
188  
def extract_wiki_links(content: str) -> Set[str]:
    """Extract all [[wiki-links]] from content."""
    # Capture the link target, dropping any |display alias.
    targets = re.findall(r'\[\[([^\]|]+)(?:\|[^\]]+)?\]\]', content)
    return set(map(normalize_link, targets))
193  
194  
def extract_terms(content: str) -> Counter:
    """Extract weighted terms from content."""
    # Strip regions that should not contribute vocabulary:
    # fenced code blocks, wiki-links (handled separately), and URLs.
    for noise in (r'```[\s\S]*?```', r'\[\[[^\]]+\]\]', r'https?://\S+'):
        content = re.sub(noise, '', content)

    # Words are letter runs, optionally hyphenated internally.
    tokens = re.findall(r'[a-zA-Z][a-zA-Z-]*[a-zA-Z]|[a-zA-Z]', content.lower())

    counts = Counter()
    for token in tokens:
        if len(token) < 3 or token in STOP_WORDS:
            continue
        # High-value concepts count triple.
        counts[token] += 3 if token in HIGH_VALUE_TERMS else 1

    return counts
215  
216  
def extract_title(content: str) -> str:
    """Extract page title from content."""
    # First level-1 heading wins; fall back to a placeholder.
    heading = re.search(r'^# (.+)$', content, re.MULTILINE)
    if heading is None:
        return "Untitled"
    return heading.group(1)
221  
222  
def scan_pages(base_path: Path) -> Dict[str, dict]:
    """Scan all markdown pages and extract metadata."""
    pages: Dict[str, dict] = {}

    for subdir in SCAN_DIRS:
        root = base_path / subdir
        if not root.exists():
            continue

        for md_file in root.rglob('*.md'):
            # Honor the skip lists for both directories and filenames.
            if any(skipped in md_file.parts for skipped in SKIP_DIRS):
                continue
            if md_file.name in SKIP_FILES:
                continue

            try:
                text = md_file.read_text(encoding='utf-8')
            except Exception:
                # Best-effort scan: unreadable files are silently skipped.
                continue

            pages[file_to_link_name(md_file)] = {
                'path': md_file,
                'content': text,
                'title': extract_title(text),
                'terms': extract_terms(text),
                'outbound': extract_wiki_links(text),
                'has_related': bool(re.search(r'^## Related', text, re.MULTILINE)),
            }

    return pages
254  
255  
def calculate_resonance(
    source_terms: Counter,
    target_terms: Counter,
    axiom_filter: str = None
) -> float:
    """
    Calculate semantic resonance between two pages.

    Returns a score from 0 to 1 indicating connection strength.
    This implements Hayek's insight: connections emerge from shared knowledge.

    If axiom_filter is specified, only considers pages that share resonance
    with that specific axiom's semantic field.
    """
    if not source_terms or not target_terms:
        return 0.0

    # Axiom gate: both pages must clear a minimum resonance with the
    # filtered axiom, otherwise the pair scores zero outright.
    if axiom_filter:
        src_axiom = calculate_axiom_resonance(source_terms, axiom_filter).get(axiom_filter, 0)
        tgt_axiom = calculate_axiom_resonance(target_terms, axiom_filter).get(axiom_filter, 0)
        if src_axiom < 0.10 or tgt_axiom < 0.10:
            return 0.0

    # Vocabulary shared by both pages.
    shared = source_terms.keys() & target_terms.keys()
    if not shared:
        return 0.0

    # Weighted overlap: each shared term contributes its smaller count.
    overlap_weight = sum(min(source_terms[t], target_terms[t]) for t in shared)

    src_total = sum(source_terms.values())
    tgt_total = sum(target_terms.values())
    if src_total == 0 or tgt_total == 0:
        return 0.0

    # Normalize by the geometric mean of the two pages' total weight.
    score = overlap_weight / ((src_total * tgt_total) ** 0.5)

    # Boost for high-value term overlap (20% per shared concept).
    high_value = shared & HIGH_VALUE_TERMS
    if high_value:
        score *= 1 + 0.2 * len(high_value)

    # Boost for overlap inside the filtered axiom's semantic field
    # (30% per shared field term).
    if axiom_filter and axiom_filter in AXIOM_FIELDS:
        field = AXIOM_FIELDS[axiom_filter]
        field_terms = field["strong"] | field["medium"] | field["weak"]
        field_shared = shared & field_terms
        if field_shared:
            score *= 1 + 0.3 * len(field_shared)

    return min(1.0, score)
320  
321  
def find_resonant_pages(
    target: str,
    pages: Dict[str, dict],
    threshold: float = 0.3,
    max_results: int = 10,
    axiom_filter: str = None
) -> List[Tuple[str, float, Optional[str]]]:
    """
    Find pages that resonate with the target page.

    The rhizome in action: any point finds its connections to any other
    point based on semantic content, not hierarchy. With axiom_filter,
    only pages sharing that axiom's semantic field are considered.

    Returns up to max_results (name, score, dominant_axiom) tuples,
    strongest first.
    """
    if target not in pages:
        return []

    entry = pages[target]
    terms = entry['terms']
    already_linked = entry['outbound']

    hits: List[Tuple[str, float, Optional[str]]] = []
    for candidate, data in pages.items():
        # Never suggest the page itself or a page it already links to.
        if candidate == target or candidate in already_linked:
            continue

        strength = calculate_resonance(terms, data['terms'], axiom_filter)
        if strength < threshold:
            continue

        # Attach the candidate's dominant axiom for display context.
        dominant = get_dominant_axiom(data['terms'])
        hits.append((candidate, strength, dominant[0] if dominant else None))

    hits.sort(key=lambda item: item[1], reverse=True)
    return hits[:max_results]
365  
366  
def find_orphans(pages: Dict[str, dict]) -> Set[str]:
    """Find pages with no inbound links (A4: at risk of ruin)."""
    # Union of every page's outbound links = the set of linked-to names.
    linked_to: Set[str] = set()
    for data in pages.values():
        linked_to.update(data['outbound'])

    # Orphans are known pages that nothing links to.
    return {name for name in pages if name not in linked_to}
375  
376  
def insert_connection(
    filepath: Path,
    content: str,
    target_name: str,
    target_title: str,
    resonance_score: float
) -> str:
    """
    Insert a wiki-link connection into the page and return the new content.

    This is the ACTION - not just reporting, but modifying.
    Implements tight feedback loops: discovery → action.

    Args:
        filepath: Page path (unused here; kept for interface stability).
        content: Current markdown content of the page.
        target_name: Wiki-link target, inserted as [[target_name]].
        target_title: Target's human title (unused; kept for interface
            stability).
        resonance_score: Connection strength, rendered as a percentage.

    Returns:
        The modified content. If a "## Related" section exists the link is
        appended at its end; otherwise a new section is created at EOF.
    """
    link = f"[[{target_name}]]"
    # Single source of truth for the rendered link line.
    new_link_line = f"- {link} - resonance: {resonance_score:.0%}\n"

    related_match = re.search(r'^## Related\s*\n', content, re.MULTILINE)

    if related_match:
        # The section runs until the next "## " heading or end of file.
        section_start = related_match.end()
        next_section = re.search(r'\n## ', content[section_start:])

        if next_section:
            insert_pos = section_start + next_section.start()
        else:
            insert_pos = len(content)

        # Append the link at the end of the Related section, normalizing
        # the blank lines around the insertion point.
        # (Removed a dead `section_content` slice the original computed
        # but never used.)
        content = (
            content[:insert_pos].rstrip()
            + "\n" + new_link_line + "\n"
            + content[insert_pos:].lstrip()
        )
    else:
        # No Related section yet: create one at the end of the file.
        related_section = f"\n---\n\n## Related\n\n{new_link_line}"
        content = content.rstrip() + related_section + "\n"

    return content
420  
421  
def create_bidirectional_link(
    pages: Dict[str, dict],
    source: str,
    target: str,
    score: float,
    dry_run: bool = False
) -> Tuple[bool, bool]:
    """
    Create bidirectional connection between pages.

    This implements the rhizome: connections are not hierarchical.
    If A links to B, B should link to A.

    Returns (source_modified, target_modified).
    """
    def _add_link(from_name: str, to_name: str) -> bool:
        # Insert a link from one page to the other unless it already exists.
        from_data = pages[from_name]
        to_data = pages[to_name]
        if to_name in from_data['outbound']:
            return False

        updated = insert_connection(
            from_data['path'],
            from_data['content'],
            to_name,
            to_data['title'],
            score
        )
        if not dry_run:
            # Persist to disk and keep the in-memory graph consistent.
            from_data['path'].write_text(updated, encoding='utf-8')
            from_data['content'] = updated
            from_data['outbound'].add(to_name)
        return True

    # Forward edge, then the rhizomatic back edge.
    source_modified = _add_link(source, target)
    target_modified = _add_link(target, source)
    return source_modified, target_modified
472  
473  
def process_single_page(
    target_file: Path,
    base_path: Path,
    threshold: float,
    suggest_only: bool = True,
    axiom_filter: str = None
) -> int:
    """Process a single page and discover its connections.

    Default behavior is suggest-only (resonance-driven-linking pattern).
    Use --apply to actually write changes.
    Use --axiom to filter by specific axiom semantic field.

    Returns 0 on success, 1 if the target file cannot be read.
    """
    print(f"[{'SUGGEST MODE' if suggest_only else 'APPLY MODE'}] Scanning graph from {base_path}...\n")
    pages = scan_pages(base_path)

    target_name = file_to_link_name(target_file)

    # The target may live outside the scanned dirs: load it directly.
    if target_name not in pages:
        try:
            text = target_file.read_text(encoding='utf-8')
            pages[target_name] = {
                'path': target_file,
                'content': text,
                'title': extract_title(text),
                'terms': extract_terms(text),
                'outbound': extract_wiki_links(text),
                'has_related': bool(re.search(r'^## Related', text, re.MULTILINE)),
            }
        except Exception as e:
            print(f"Error reading {target_file}: {e}", file=sys.stderr)
            return 1

    page = pages[target_name]
    print(f"Finding resonances for: {page['title']}")
    print(f"Path: {target_file}")
    print(f"Threshold: {threshold:.0%}")

    # Report the page's own axiom alignment for context.
    if calculate_axiom_resonance(page['terms']):
        dominant = get_dominant_axiom(page['terms'])
        if dominant:
            print(f"Dominant axiom: {dominant[0]} ({AXIOM_FIELDS[dominant[0]]['name']}) - {dominant[1]:.0%}")

    if axiom_filter:
        print(f"Filter: {axiom_filter} ({AXIOM_FIELDS[axiom_filter]['name']}) only")
    print()

    resonances = find_resonant_pages(target_name, pages, threshold, axiom_filter=axiom_filter)

    if not resonances:
        print("No resonant pages found above threshold.")
        return 0

    print(f"Found {len(resonances)} resonant connections:\n")

    connections_made = 0
    for name, score, axiom_tag in resonances:
        candidate = pages[name]
        tag = f" [{axiom_tag}]" if axiom_tag else ""
        print(f"  [{score:.0%}]{tag} {candidate['title']}")
        print(f"        → {candidate['path']}")

        if suggest_only:
            print(f"        💡 Suggested: [[{name}]] ↔ [[{target_name}]]")
        else:
            src_mod, tgt_mod = create_bidirectional_link(
                pages, target_name, name, score, dry_run=False
            )
            if src_mod:
                print(f"        ✓ APPLIED: [[{name}]] → source")
                connections_made += 1
            if tgt_mod:
                print(f"        ✓ APPLIED: [[{target_name}]] → target (bidirectional)")
                connections_made += 1
        print()

    if suggest_only:
        print(f"\n📋 SUGGESTIONS: {len(resonances)} bidirectional connections discovered")
        print(f"   To apply: python {__file__} {target_file} --apply")
        if axiom_filter:
            print(f"   Filter: {axiom_filter} semantic field")
        print(f"\n   Pattern: Automation SUGGESTS; the human CONFIRMS; the link EMERGES")
    else:
        print(f"\n✓ APPLIED: {connections_made} connections created")

    return 0
564  
565  
def fix_orphans(base_path: Path, threshold: float, suggest_only: bool = True) -> int:
    """
    Find and suggest connections for all orphan pages.

    This is A4 in action: prevent ruin (orphan state) before it calcifies.
    Default is suggest-only mode (resonance-driven-linking pattern).

    Args:
        base_path: Repo root to scan for markdown pages.
        threshold: Minimum resonance score for a suggestion.
        suggest_only: When True, only print suggestions; when False,
            write bidirectional links into the files.

    Returns:
        Process exit code (always 0).
    """
    mode_label = "SUGGEST MODE" if suggest_only else "APPLY MODE"
    print(f"[{mode_label}] Scanning graph from {base_path}...\n")
    pages = scan_pages(base_path)
    orphans = find_orphans(pages)

    print(f"Found {len(orphans)} orphan pages (no inbound links)\n")
    print("=" * 60)

    if not orphans:
        print("No orphans found - graph is healthy!")
        return 0

    total_connections = 0
    orphans_with_suggestions = 0

    for orphan in sorted(orphans):
        if orphan not in pages:
            continue

        data = pages[orphan]
        print(f"\n📄 {data['title']}")
        print(f"   {data['path']}")

        resonances = find_resonant_pages(orphan, pages, threshold, max_results=3)

        if not resonances:
            print("   ⚠ No resonances found - needs manual connection")
            continue

        connections_for_orphan = 0
        # BUG FIX: find_resonant_pages returns (name, score, axiom_tag)
        # 3-tuples; the previous 2-name unpacking raised ValueError at
        # runtime for every orphan that had suggestions.
        for name, score, _axiom_tag in resonances:
            target_data = pages[name]

            if not suggest_only:
                src_mod, tgt_mod = create_bidirectional_link(
                    pages, orphan, name, score, dry_run=False
                )
                if src_mod or tgt_mod:
                    print(f"   ✓ APPLIED [{score:.0%}] ↔ {target_data['title']}")
                    connections_for_orphan += 1
                    # Count each direction that was actually written.
                    total_connections += (1 if src_mod else 0) + (1 if tgt_mod else 0)
            else:
                print(f"   💡 [{score:.0%}] → {target_data['title']}")

        if connections_for_orphan > 0 or suggest_only:
            orphans_with_suggestions += 1

    print("\n" + "=" * 60)
    if suggest_only:
        print(f"\n📋 SUGGESTIONS: {orphans_with_suggestions} orphans have suggested connections")
        print(f"   To apply: python {__file__} --fix-orphans --apply")
        print(f"\n   Pattern: Automation SUGGESTS; the human CONFIRMS; the link EMERGES")
    else:
        print(f"\n✓ APPLIED: Fixed {orphans_with_suggestions} orphans with {total_connections} connections")

    return 0
629  
630  
def main():
    """Parse CLI arguments and dispatch to the requested mode."""
    parser = argparse.ArgumentParser(
        description='Resonance Engine - Connection discovery with human confirmation',
        epilog='Pattern: Automation SUGGESTS; the human CONFIRMS; the link EMERGES'
    )
    parser.add_argument('target', nargs='?', type=Path,
                        help='Target file to find connections for')
    parser.add_argument('--fix-orphans', action='store_true',
                        help='Find and suggest connections for all orphan pages')
    parser.add_argument('--apply', action='store_true',
                        help='Actually insert links (default: suggest-only mode)')
    parser.add_argument('--dry-run', '-n', action='store_true',
                        help='(Deprecated) Same as default suggest-only mode')
    parser.add_argument('--threshold', '-t', type=float, default=0.25,
                        help='Minimum resonance score (0-1, default: 0.25)')
    parser.add_argument('--axiom', '-a', type=str,
                        choices=['A0', 'A1', 'A2', 'A3', 'A4'],
                        help='Filter by axiom semantic field (e.g., A0 for Boundary Operation)')
    parser.add_argument('--path', '-p', type=Path,
                        default=Path(__file__).parent.parent,
                        help='Path to Sovereign_OS repo')

    args = parser.parse_args()

    # Suggest-only unless --apply was given explicitly
    # (resonance-driven-linking: suggest, don't impose).
    suggest_only = not args.apply

    if args.fix_orphans:
        return fix_orphans(args.path, args.threshold, suggest_only)
    if args.target:
        return process_single_page(
            args.target, args.path, args.threshold, suggest_only, args.axiom
        )
    parser.print_help()
    return 1
691  
692  
if __name__ == '__main__':
    # Propagate main()'s return code as the process exit status.
    raise SystemExit(main())