# scripts/principle_steward.py
  1  #!/usr/bin/env python3
  2  """
  3  Principle Steward - Unified topology maintenance for all axioms (A0-A4)
  4  
  5  This is a "center of excellence" agent that:
  6  1. Knows the full topology of each axiom (positive/negative instances, edges)
  7  2. Detects when an axiom is invoked with divergence
  8  3. Propagates edge discoveries to all connections
  9  4. Identifies hidden connections that become visible with new edges
 10  5. Maps consciousness levels (L0-L3) to steward functions
 11  
 12  Usage:
 13      python scripts/principle_steward.py --axiom A0 --crawl
 14      python scripts/principle_steward.py --axiom A1 --test "statement"
 15      python scripts/principle_steward.py --axiom A2 --edge "description"
 16      python scripts/principle_steward.py --axiom A3 --propagate
 17      python scripts/principle_steward.py --axiom A4 --find-hidden
 18      python scripts/principle_steward.py --all --status
 19  
 20  Consciousness Levels:
 21      L0 (Autonomic): Background crawl, term extraction - never surfaces
 22      L1 (Background): Resonance computation - surfaces on anomaly
 23      L2 (Preconscious): Topology monitoring - passively visible
 24      L3 (Conscious): Edge discovery - actively notifies
 25  
 26  Pattern: The Talmud defines the edges of the Torah.
 27  """
 28  
 29  import os
 30  import re
 31  import sys
 32  import json
 33  import argparse
 34  from pathlib import Path
 35  from datetime import datetime
 36  from typing import Dict, List, Set, Tuple, Optional
 37  from collections import Counter
 38  
 39  # =============================================================================
 40  # AXIOM DEFINITIONS
 41  # =============================================================================
 42  
# Schema for each axiom entry (consumed by calculate_resonance / crawl_topology):
#   name             - human-readable axiom title
#   type             - primitive category label (display only)
#   principle        - short statements of the axiom itself (display only)
#   strong_terms /
#   medium_terms /
#   weak_terms       - vocabulary sets matched against input text; weighted
#                      0.15 / 0.08 / 0.03 per matched term in calculate_resonance
#   exemplars        - (phrase, weight) pairs; term overlap with an exemplar
#                      boosts the resonance score proportionally to its weight
#   counter_examples - phrases that apply a flat 0.2 penalty when they appear
#                      verbatim (after normalization) in the input text
#   topology_file    - markdown file under sessions/principle-topology/ that
#                      record_edge appends edge discoveries to
AXIOMS = {
    # A0 — structural primitive: boundaries/Markov blankets
    "A0": {
        "name": "Boundary Operation",
        "type": "Structural Primitive",
        "principle": [
            "Every coherent system is Markov blankets within Markov blankets.",
            "The boundary IS the intelligence."
        ],
        "strong_terms": {
            'boundary', 'blanket', 'markov', 'distinction', 'observer',
            'sovereign', 'sovereignty', 'sensory', 'active', 'internal', 'external'
        },
        "medium_terms": {
            'permeable', 'permeability', 'flow', 'flows', 'structure', 'content',
            'scale', 'fractal', 'nested', 'inside', 'outside', 'crosses'
        },
        "weak_terms": {
            'separate', 'division', 'filter', 'membrane', 'in', 'out', 'across'
        },
        "exemplars": [
            ("the boundary is the intelligence", 1.0),
            ("blankets within blankets", 0.95),
            ("what crosses and what doesn't", 0.90),
            ("structure flows content stays sovereign", 0.85),
            ("same operation at every scale", 0.88),
            ("the distinction creates the observer", 0.92),
        ],
        "counter_examples": [
            "build walls",
            "keep everything separate",
            "central control",
        ],
        "topology_file": "A0-boundary-operation.md"
    },
    # A1 — teleological primitive: integration vs. isolation
    "A1": {
        "name": "Telos of Integration",
        "type": "Teleological Primitive",
        "principle": [
            "Satan didn't know he was choosing isolation.",
            "Systems that persist are systems that integrate."
        ],
        "strong_terms": {
            'integration', 'integrate', 'integrating', 'connection', 'connect',
            'binding', 'isolation', 'isolated', 'isolating', 'relation'
        },
        "medium_terms": {
            'tribe', 'tribal', 'collective', 'unify', 'unity', 'unified',
            'merge', 'merging', 'together', 'shared', 'sovereignty'
        },
        "weak_terms": {
            'join', 'joining', 'link', 'linking', 'collaborate'
        },
        "exemplars": [
            ("satan didn't know he was choosing isolation", 1.0),
            ("systems that persist integrate", 0.95),
            ("binding is love", 0.90),
            ("sovereignty with relation not without", 0.88),
            ("generative model oriented toward binding", 0.85),
        ],
        "counter_examples": [
            "merge everything",
            "unify all voices",
            "groupthink",
        ],
        "topology_file": "A1-telos-of-integration.md"
    },
    # A2 — aesthetic primitive: living vs. calcified
    "A2": {
        "name": "Recognition of Life",
        "type": "Aesthetic Primitive",
        "principle": [
            "Can you recognize life? Death mimics life through ornament.",
            "The primitive is alive. The calcified is dead."
        ],
        "strong_terms": {
            'life', 'alive', 'living', 'death', 'dead', 'dying', 'primitive',
            'calcified', 'sclerosis', 'ornament', 'cain', 'blindness'
        },
        "medium_terms": {
            'recognize', 'recognition', 'beauty', 'beautiful', 'resonance',
            'simple', 'complex', 'accumulated', 'cruft', 'golden', 'carpenter'
        },
        "weak_terms": {
            'fresh', 'stale', 'responsive', 'rigid', 'clear', 'opaque'
        },
        "exemplars": [
            ("death mimics life through ornament", 1.0),
            ("primitive is alive calcified is dead", 0.95),
            ("golden cup carpenter cup", 0.90),
            ("can you recognize life", 0.88),
            ("resonance is contact with truth", 0.85),
            ("cain sin is blindness not malice", 0.82),
        ],
        "counter_examples": [
            "simple is always better",
            "all complexity is bad",
            "beautiful equals good",
        ],
        "topology_file": "A2-recognition-of-life.md"
    },
    # A3 — operational primitive: navigating between poles
    "A3": {
        "name": "Dynamic Pole Navigation",
        "type": "Operational Primitive",
        "principle": [
            "The tension IS the dyad. The solution is somewhere between.",
            "Move dynamically between poles; don't fix."
        ],
        "strong_terms": {
            'pole', 'poles', 'dyad', 'tension', 'navigate', 'navigation',
            'oscillate', 'oscillation', 'movement', 'shadow', 'dynamic', 'static'
        },
        "medium_terms": {
            'extreme', 'extremes', 'balance', 'between', 'spectrum',
            'context', 'contextual', 'invert', 'inversion'
        },
        "weak_terms": {
            'middle', 'midpoint', 'swing', 'shift', 'pendulum'
        },
        "exemplars": [
            ("the tension is the dyad", 1.0),
            ("move between poles don't fix", 0.95),
            ("life is the oscillation", 0.92),
            ("both poles are failure modes", 0.88),
            ("shadow pole has information", 0.85),
            ("invert always invert", 0.82),
        ],
        "counter_examples": [
            "find the middle ground",
            "balance everything",
            "avoid extremes",
            "stay neutral",
        ],
        "topology_file": "A3-dynamic-pole-navigation.md"
    },
    # A4 — survival primitive (still a candidate): ruin avoidance first
    "A4": {
        "name": "Ergodic Asymmetry",
        "type": "Survival Primitive (CANDIDATE)",
        "principle": [
            "Prevent ruin before optimizing gain.",
            "In non-ergodic systems, survival precedes optimization."
        ],
        "strong_terms": {
            'ruin', 'ruinous', 'catastrophe', 'catastrophic', 'terminal',
            'irreversible', 'unrecoverable', 'survival', 'survive', 'ergodic',
            'non-ergodic', 'asymmetry', 'asymmetric', 'compound'
        },
        "medium_terms": {
            'risk', 'risky', 'dangerous', 'cheap', 'expensive', 'cost',
            'rebuild', 'rewrite', 'redo', 'reversible', 'undo', 'rollback'
        },
        "weak_terms": {
            'careful', 'caution', 'fail', 'failure', 'recover', 'backup'
        },
        "exemplars": [
            ("prevent ruin before optimizing gain", 1.0),
            ("one catastrophic loss undoes accumulated gains", 0.95),
            ("time average does not equal ensemble average", 0.90),
            ("some positions are terminal", 0.85),
            ("rewrites are cheap", 0.80),
            ("fail fast fail cheap", 0.80),
        ],
        "counter_examples": [
            "don't make mistakes",
            "be careful always",
            "optimize expected value",
        ],
        "topology_file": "A4-ergodic-asymmetry.md"
    }
}
211  
212  
213  # =============================================================================
214  # UTILITY FUNCTIONS
215  # =============================================================================
216  
def get_base_path() -> Path:
    """Return the Sovereign_OS root: two levels above this script file."""
    script_file = Path(__file__)
    return script_file.parent.parent
220  
221  
def normalize_text(text: str) -> str:
    """Lowercase *text* and drop every character except a-z, 0-9 and whitespace."""
    lowered = text.lower()
    return re.sub(r'[^a-z0-9\s]', '', lowered)
225  
226  
def extract_terms(text: str) -> Counter:
    """Count normalized (lowercased, punctuation-stripped) words longer than two characters."""
    cleaned = re.sub(r'[^a-z0-9\s]', '', text.lower())
    counts = Counter()
    for word in cleaned.split():
        if len(word) > 2:
            counts[word] += 1
    return counts
232  
233  
def get_divergence_zone(score: float) -> Tuple[str, str, str]:
    """
    Determine divergence zone and action based on score.

    Returns (zone_name, action, handler)

    Divergence D = 1 - Resonance Score
    """
    divergence = 1.0 - score

    # Zones ordered by increasing divergence; first matching upper bound wins.
    bands = [
        (0.15, ("CORE", "Proceed normally", "Auto")),
        (0.30, ("CONFIRMED", "Log positive instance to topology", "Auto")),
        (0.50, ("EDGE ZONE", "Steward activates: discover edge, propagate", "Principle Steward")),
        (0.70, ("TENSION", "Escalate: principle may need adjustment", "Human")),
    ]
    for upper_bound, zone in bands:
        if divergence < upper_bound:
            return zone
    # D >= 0.70: the statement barely resonates at all.
    return ("DIVERGENT", "Three options: adjust principle, spawn candidate, or reject", "Human")
254  
255  
def get_consciousness_level(action: str) -> Tuple[str, str]:
    """
    Map an action to its consciousness level.

    Returns (level_name, description)
    """
    mapping = {
        "crawl": ("L0 (Autonomic)", "Background processing, never surfaces"),
        "index": ("L0 (Autonomic)", "Background term extraction"),
        "compute": ("L1 (Background)", "Resonance computation, surfaces on anomaly"),
        "monitor": ("L2 (Preconscious)", "Topology monitoring, passively visible"),
        "edge": ("L3 (Conscious)", "Edge discovery, actively notifies"),
        "alert": ("L3 (Conscious)", "Active notification to steward/human"),
    }
    try:
        return mapping[action]
    except KeyError:
        return ("Unknown", "No mapping")
271  
272  
273  # =============================================================================
274  # CORE FUNCTIONS
275  # =============================================================================
276  
def calculate_resonance(text: str, axiom_id: str) -> Tuple[float, Dict[str, float], List[str]]:
    """
    Calculate resonance for a given text against a specific axiom.

    Score = capped term score + best exemplar score - counter-example penalty,
    clamped to [0, 1].

    Args:
        text: Free-form statement or document body to score.
        axiom_id: Key into AXIOMS (e.g. "A0").

    Returns:
        - Overall score (0-1)
        - Breakdown by signal type
        - Matched exemplars

    Raises:
        ValueError: if axiom_id is not in AXIOMS.
    """
    axiom = AXIOMS.get(axiom_id)
    if not axiom:
        raise ValueError(f"Unknown axiom: {axiom_id}")

    normalized = normalize_text(text)
    terms = set(normalized.split())

    # Term matching: set intersection against each vocabulary tier.
    strong_matches = terms & axiom["strong_terms"]
    medium_matches = terms & axiom["medium_terms"]
    weak_matches = terms & axiom["weak_terms"]

    # Tier weights: strong 0.15, medium 0.08, weak 0.03 per matched term.
    term_score = (
        len(strong_matches) * 0.15 +
        len(medium_matches) * 0.08 +
        len(weak_matches) * 0.03
    )
    # Cap so vocabulary alone can contribute at most half the total score.
    term_score = min(0.5, term_score)

    # Exemplar matching: take the best (max) score over all exemplars.
    exemplar_score = 0.0
    matched_exemplars = []

    for exemplar, weight in axiom["exemplars"]:
        exemplar_normalized = normalize_text(exemplar)
        exemplar_terms = set(exemplar_normalized.split())

        # Heuristic 1 (stronger, weight * 0.5): the input shares >= 2 words
        # with the exemplar, or any input word longer than 4 chars appears
        # as a substring of the exemplar phrase.
        overlap = terms & exemplar_terms
        if len(overlap) >= 2 or any(t in exemplar_normalized for t in terms if len(t) > 4):
            exemplar_score = max(exemplar_score, weight * 0.5)
            matched_exemplars.append(exemplar)

        # Heuristic 2 (weaker, weight * 0.3): any exemplar word longer than
        # 5 chars appears as a substring of the input text.
        if any(term in normalized for term in exemplar_terms if len(term) > 5):
            exemplar_score = max(exemplar_score, weight * 0.3)
            if exemplar not in matched_exemplars:
                matched_exemplars.append(exemplar)

    # Counter-example check: a single verbatim (normalized) substring hit
    # applies one flat 0.2 penalty, regardless of how many counters match.
    counter_penalty = 0.0
    for counter in axiom["counter_examples"]:
        counter_normalized = normalize_text(counter)
        if counter_normalized in normalized:
            counter_penalty = 0.2
            break

    total_score = min(1.0, term_score + exemplar_score - counter_penalty)

    # NOTE(review): the term components below are the *uncapped* values, so the
    # breakdown may sum to more than total_score when the 0.5 cap kicked in —
    # confirm whether that is intended for display purposes.
    breakdown = {
        "strong_terms": len(strong_matches) * 0.15,
        "medium_terms": len(medium_matches) * 0.08,
        "weak_terms": len(weak_matches) * 0.03,
        "exemplar_match": exemplar_score,
        "counter_penalty": -counter_penalty
    }

    return total_score, breakdown, matched_exemplars
342  
343  
def crawl_topology(axiom_id: str) -> Dict:
    """Crawl the graph for all references to a specific axiom.

    Scans every markdown file under docs/, patterns/, sessions/ and
    dashboards/ (relative to the repo root), recording direct mentions of
    the axiom, semantic resonance scores, and "orphan" files that nothing
    links to via [[wiki-links]] but that still carry the axiom's vocabulary.

    Raises:
        ValueError: if axiom_id is not in AXIOMS.
    """
    axiom = AXIOMS.get(axiom_id)
    if not axiom:
        raise ValueError(f"Unknown axiom: {axiom_id}")

    base_path = get_base_path()
    scan_dirs = ['docs', 'patterns', 'sessions', 'dashboards']

    results = {
        "axiom": axiom_id,
        "axiom_name": axiom["name"],
        "files_scanned": 0,
        "direct_references": [],    # files that mention the axiom by id or name
        "semantic_matches": [],     # resonance score >= 0.3
        "high_resonance": [],       # resonance score >= 0.5
        "orphans_with_terms": [],   # unlinked files with score >= 0.2
        "consciousness_level": "L0 (Autonomic)",
        "crawl_time": datetime.now().isoformat()
    }

    inbound_links = {}  # link target key -> set of file keys linking to it
    all_files = {}      # file key (stem, lowercased, dashed) -> relative path

    # Build search pattern for direct references.
    # NOTE(review): the axiom name is interpolated unescaped — safe for the
    # current names, but would break if a name ever contained regex metachars.
    axiom_pattern = re.compile(rf'\b{axiom_id}\b|{axiom["name"].lower()}', re.IGNORECASE)

    for scan_dir in scan_dirs:
        dir_path = base_path / scan_dir
        if not dir_path.exists():
            continue

        for filepath in dir_path.rglob('*.md'):
            if '.git' in filepath.parts:
                continue

            # Best-effort read: skip files that fail to decode or open.
            try:
                content = filepath.read_text(encoding='utf-8')
            except Exception:
                continue

            results["files_scanned"] += 1
            rel_path = str(filepath.relative_to(base_path))

            # Normalize the stem the same way link targets are normalized below,
            # so orphan detection compares like with like.
            file_key = filepath.stem.lower().replace(' ', '-')
            all_files[file_key] = rel_path

            # Collect [[wiki-link]] targets (text up to ']' or the '|' alias).
            links = re.findall(r'\[\[([^\]|]+)', content)
            for link in links:
                link_key = link.lower().replace(' ', '-')
                if link_key not in inbound_links:
                    inbound_links[link_key] = set()
                inbound_links[link_key].add(file_key)

            # Direct references
            if axiom_pattern.search(content):
                results["direct_references"].append({
                    "file": rel_path,
                    "type": "direct"
                })

            # Semantic resonance
            score, breakdown, exemplars = calculate_resonance(content, axiom_id)

            if score >= 0.3:
                results["semantic_matches"].append({
                    "file": rel_path,
                    "score": score,
                    "exemplars": exemplars
                })

            if score >= 0.5:
                results["high_resonance"].append({
                    "file": rel_path,
                    "score": score
                })

    # Find orphans: scanned files whose key never appears as a link target.
    # NOTE(review): each orphan file is read and scored a second time here.
    orphans = set(all_files.keys()) - set(inbound_links.keys())
    for orphan in orphans:
        if orphan in all_files:
            filepath = base_path / all_files[orphan]
            if filepath.exists():
                try:
                    content = filepath.read_text(encoding='utf-8')
                    score, _, exemplars = calculate_resonance(content, axiom_id)
                    if score >= 0.2:
                        results["orphans_with_terms"].append({
                            "file": all_files[orphan],
                            "score": score,
                            "exemplars": exemplars
                        })
                except Exception:
                    # Best-effort: unreadable orphans are simply skipped.
                    pass

    return results
440  
441  
def test_statement(text: str, axiom_id: str) -> None:
    """Test a statement for axiom resonance.

    Prints a human-readable report: score bar, divergence zone, consciousness
    level, score breakdown, matched exemplars, and a status/action summary.
    Unknown axiom ids print an error instead of raising.
    """
    axiom = AXIOMS.get(axiom_id)
    if not axiom:
        print(f"Error: Unknown axiom {axiom_id}")
        return

    score, breakdown, exemplars = calculate_resonance(text, axiom_id)

    print(f"\n{'='*60}")
    print(f"{axiom_id} STEWARD - {axiom['name']} Resonance Analysis")
    print(f"{'='*60}\n")

    print(f"Statement: \"{text}\"\n")

    # Score visualization: 40-char bar, filled proportionally to the score.
    bar_length = 40
    filled = int(score * bar_length)
    bar = "█" * filled + "░" * (bar_length - filled)
    print(f"{axiom_id} Resonance: [{bar}] {score:.0%}\n")

    # Divergence zone
    zone, action, handler = get_divergence_zone(score)
    divergence = 1.0 - score
    print(f"Divergence: D = {divergence:.2f}")
    print(f"Zone: {zone}")
    print(f"Action: {action}")
    print(f"Handler: {handler}")

    # Consciousness level: EDGE ZONE surfaces as L3 "edge", TENSION/DIVERGENT
    # as L3 "alert", everything else stays at L1 "compute".
    if zone == "EDGE ZONE":
        level, desc = get_consciousness_level("edge")
    elif zone in ["TENSION", "DIVERGENT"]:
        level, desc = get_consciousness_level("alert")
    else:
        level, desc = get_consciousness_level("compute")
    print(f"Consciousness Level: {level} - {desc}\n")

    # Breakdown: only non-zero components are shown.
    print("Score Breakdown:")
    for key, value in breakdown.items():
        if value != 0:
            sign = "+" if value > 0 else ""
            print(f"  {key}: {sign}{value:.2f}")

    if exemplars:
        print(f"\nMatched Exemplars:")
        for ex in exemplars:
            print(f"  - \"{ex}\"")

    # Status footer: one branch per divergence zone.
    print(f"\n{'='*60}")
    if zone == "CORE":
        print(f"STATUS: Strong {axiom_id} alignment - proceed normally")
    elif zone == "CONFIRMED":
        print(f"STATUS: Good {axiom_id} fit - logging as positive instance")
    elif zone == "EDGE ZONE":
        print("STATUS: STEWARD ACTIVATING - edge discovery triggered")
        print("  → Document this edge in topology")
        print("  → Propagate to connections")
        print("  → Check for newly visible orphans")
    elif zone == "TENSION":
        print("STATUS: High divergence - escalating to human")
        print(f"  → May need to adjust {axiom_id}")
        print(f"  → Or reject this idea as {axiom_id}-violating")
    else:
        print("STATUS: Major divergence - three options:")
        print(f"  1. ADJUST: {axiom_id} principle is incomplete")
        print("  2. SPAWN: This is a new candidate principle")
        print(f"  3. REJECT: This idea violates {axiom_id}")
    print(f"{'='*60}\n")
512  
513  
def record_edge(description: str, axiom_id: str, source: str = "manual") -> None:
    """Record a new edge discovery for an axiom.

    Appends an edge entry to the axiom's topology markdown file (inside its
    "## Edges" section when present, otherwise at the end of the file) and
    writes an L3 resonance alert file so the discovery actively notifies.

    Args:
        description: Free-form edge description; its first sentence becomes
            the entry heading.
        axiom_id: Key into AXIOMS (e.g. "A2").
        source: Attribution string recorded with the edge.
    """
    axiom = AXIOMS.get(axiom_id)
    if not axiom:
        print(f"Error: Unknown axiom {axiom_id}")
        return

    topology_path = get_base_path() / f"sessions/principle-topology/{axiom['topology_file']}"

    if not topology_path.exists():
        print(f"Error: Topology file not found at {topology_path}")
        return

    content = topology_path.read_text(encoding='utf-8')

    today = datetime.now().strftime("%Y-%m-%d")
    timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
    credit_id = f"edge-{axiom_id.lower()}-{timestamp}"

    edge_entry = f"""
### Edge: {description.split('.')[0]}
- **Discovery:** {description}
- **Implication:** [Needs analysis]
- **Source:** {source}
- **Discovered:** {today}
- **Credit ID:** {credit_id}
"""

    # Find edges section and append
    edges_marker = "## Edges (Boundaries Discovered)"
    if edges_marker in content:
        # BUG FIX: split on the *first* occurrence only. An unbounded split
        # would silently drop everything after a second occurrence of the
        # marker when reassembling the file.
        head, tail = content.split(edges_marker, 1)
        next_section = re.search(r'\n## ', tail)
        if next_section:
            # Insert the entry just before the next "## " section heading.
            insert_pos = next_section.start()
            new_content = (
                head + edges_marker +
                tail[:insert_pos] + edge_entry +
                tail[insert_pos:]
            )
        else:
            # Edges section is the last section: append at end of file.
            new_content = content + edge_entry

        topology_path.write_text(new_content, encoding='utf-8')
        print(f"\n✓ Edge recorded in {axiom_id} topology: {description[:50]}...")
        print(f"  File: {topology_path}")
        print(f"  Credit ID: {credit_id}")
    else:
        print(f"Could not find edges section in {axiom_id} topology file")

    # Create resonance alert
    alerts_dir = get_base_path() / "sessions/RESONANCE-ALERTS"
    # BUG FIX: parents=True so the alert directory is created even when the
    # intermediate "sessions" directory does not exist yet.
    alerts_dir.mkdir(parents=True, exist_ok=True)

    alert_path = alerts_dir / f"{timestamp}-edge-discovery-{axiom_id.lower()}.md"

    alert_content = f"""# {axiom_id} Edge Discovery - {today}

**Type:** EDGE_DISCOVERY
**Principle:** {axiom_id} ({axiom["name"]})
**Detected:** {datetime.now().isoformat()}
**Credit ID:** {credit_id}
**Source:** {source}
**Consciousness Level:** L3 (Conscious) - Active notification

---

## New Edge

> {description}

## Credit Attribution

```
SOURCE: {source}
TRIGGER: Edge discovery in {axiom_id}
EDGE: "{description[:50]}..."
PROPAGATION: [Pending - run --propagate]
TOTAL CREDIT: ΔF = [Calculate after propagation]
```

## Action Required

Run: `python scripts/principle_steward.py --axiom {axiom_id} --propagate`

---

*Auto-generated by Principle Steward | {today}*
"""

    alert_path.write_text(alert_content, encoding='utf-8')
    print(f"✓ Alert created: {alert_path}")
607  
608  
def propagate_edges(axiom_id: str) -> None:
    """Propagate edge discoveries to all connections.

    Re-crawls the topology and prints the files with the highest resonance —
    the ones that should be aware of the axiom's current edges.
    """
    axiom = AXIOMS.get(axiom_id)
    if axiom is None:
        print(f"Error: Unknown axiom {axiom_id}")
        return

    rule = '=' * 60
    thin = '-' * 60

    print(f"\n{rule}")
    print(f"{axiom_id} STEWARD - Edge Propagation")
    print(f"Consciousness Level: L2 (Preconscious) - Topology monitoring")
    print(f"{rule}\n")

    crawl = crawl_topology(axiom_id)

    # Summary of what the crawl found.
    for line in (
        f"Axiom: {axiom_id} - {axiom['name']}",
        f"Files scanned: {crawl['files_scanned']}",
        f"Direct references: {len(crawl['direct_references'])}",
        f"Semantic matches: {len(crawl['semantic_matches'])}",
        f"High resonance (>50%): {len(crawl['high_resonance'])}",
        f"Orphans with {axiom_id} terms: {len(crawl['orphans_with_terms'])}",
    ):
        print(line)

    print(f"\n{thin}")
    print(f"Files that should understand current {axiom_id} edges:")
    print(f"{thin}\n")

    # Top 15 files, best score first.
    ranked = sorted(crawl['high_resonance'], key=lambda m: m['score'], reverse=True)
    for match in ranked[:15]:
        print(f"  [{match['score'] * 100:5.1f}%] {match['file']}")

    print(f"\n{rule}")
    print("SUGGESTION: Review high-resonance files for edge awareness")
    print(f"{rule}\n")
641  
642  
def find_hidden_connections(axiom_id: str) -> None:
    """Find orphans that might now connect.

    Lists unlinked files that nevertheless carry the axiom's semantic field,
    sorted by resonance score.
    """
    axiom = AXIOMS.get(axiom_id)
    if axiom is None:
        print(f"Error: Unknown axiom {axiom_id}")
        return

    rule = '=' * 60
    thin = '-' * 60

    print(f"\n{rule}")
    print(f"{axiom_id} STEWARD - Hidden Connection Discovery")
    print(f"{rule}\n")

    orphans = crawl_topology(axiom_id)['orphans_with_terms']

    if not orphans:
        print(f"No orphans found with {axiom_id} semantic field.")
        return

    print(f"Found {len(orphans)} orphans with {axiom_id} semantic field:\n")

    ranked = sorted(orphans, key=lambda o: o['score'], reverse=True)
    for orphan in ranked:
        print(f"  [{orphan['score'] * 100:5.1f}%] {orphan['file']}")
        # Show up to two exemplar phrases that matched, when any were recorded.
        for ex in (orphan.get('exemplars') or [])[:2]:
            print(f"          Matches: \"{ex}\"")
        print()

    print(f"{thin}")
    print(f"SUGGESTION: These orphans might connect to {axiom_id} or derived principles.")
    print(f"{thin}\n")
674  
675  
def show_status(axiom_id: Optional[str] = None) -> None:
    """Show status for one or all axioms.

    With no axiom_id, every axiom in AXIOMS is crawled and summarized.
    """
    targets = [axiom_id] if axiom_id else list(AXIOMS.keys())

    rule = '=' * 60

    print(f"\n{rule}")
    print("PRINCIPLE STEWARD - Status Report")
    print(f"{rule}\n")

    for aid in targets:
        axiom = AXIOMS.get(aid)
        if axiom is None:
            continue

        crawl = crawl_topology(aid)

        print(f"## {aid}: {axiom['name']}")
        print(f"   Type: {axiom['type']}")
        print(f"   Direct refs: {len(crawl['direct_references'])}")
        print(f"   Semantic matches: {len(crawl['semantic_matches'])}")
        print(f"   High resonance: {len(crawl['high_resonance'])}")
        print(f"   Orphans with terms: {len(crawl['orphans_with_terms'])}")
        print()

    print(f"{rule}")
    print("Consciousness Level Mapping:")
    print("  L0 (Autonomic): --crawl (background indexing)")
    print("  L1 (Background): resonance computation")
    print("  L2 (Preconscious): --propagate, --status (monitoring)")
    print("  L3 (Conscious): --edge, --test with EDGE ZONE (alerts)")
    print(f"{rule}\n")
706  
707  
708  # =============================================================================
709  # MAIN
710  # =============================================================================
711  
def main():
    """CLI entry point.

    Parses arguments and dispatches to the steward operations.

    Returns:
        Process exit code: 0 on success, 1 on usage errors.
    """
    parser = argparse.ArgumentParser(
        description='Principle Steward - Unified topology maintenance for all axioms',
        epilog='Consciousness Levels: L0 (Autonomic), L1 (Background), L2 (Preconscious), L3 (Conscious)'
    )

    parser.add_argument(
        '--axiom',
        type=str,
        # Derive choices from AXIOMS so new axioms need no CLI change
        # (currently yields the same A0-A4 list as before).
        choices=sorted(AXIOMS),
        help='Which axiom to operate on'
    )
    parser.add_argument(
        '--all',
        action='store_true',
        help='Operate on all axioms'
    )
    parser.add_argument(
        '--crawl',
        action='store_true',
        help='Crawl the graph (L0 - Autonomic)'
    )
    parser.add_argument(
        '--test',
        type=str,
        metavar='TEXT',
        help='Test a statement for resonance (L1/L3)'
    )
    parser.add_argument(
        '--edge',
        type=str,
        metavar='DESC',
        help='Record a new edge discovery (L3 - Conscious)'
    )
    parser.add_argument(
        '--propagate',
        action='store_true',
        help='Propagate edges to connections (L2 - Preconscious)'
    )
    parser.add_argument(
        '--find-hidden',
        action='store_true',
        help='Find orphans that might now connect'
    )
    parser.add_argument(
        '--status',
        action='store_true',
        help='Show status report (L2 - Preconscious)'
    )
    parser.add_argument(
        '--json',
        action='store_true',
        help='Output in JSON format (for crawl)'
    )

    args = parser.parse_args()

    # Validate axiom requirement: --status alone is allowed (reports all axioms).
    if not args.all and not args.axiom and not args.status:
        parser.print_help()
        print("\nError: Either --axiom or --all is required (except for --status)")
        return 1

    # Handle commands (first matching flag wins, in this order).
    if args.status:
        show_status(args.axiom)  # args.axiom is already None when omitted
    elif args.crawl:
        axioms = list(AXIOMS.keys()) if args.all else [args.axiom]
        for aid in axioms:
            results = crawl_topology(aid)
            if args.json:
                print(json.dumps(results, indent=2))
            else:
                axiom = AXIOMS[aid]
                print(f"\n{aid} Topology Crawl - {axiom['name']}")
                print(f"  Consciousness Level: L0 (Autonomic)")
                print(f"  Files scanned: {results['files_scanned']}")
                print(f"  Direct references: {len(results['direct_references'])}")
                print(f"  Semantic matches: {len(results['semantic_matches'])}")
                print(f"  High resonance: {len(results['high_resonance'])}")
                print(f"  Orphans: {len(results['orphans_with_terms'])}")
    # BUG FIX: compare against None so an explicit empty string ("--test ''")
    # is still dispatched instead of falling through to the help text.
    elif args.test is not None:
        if args.all:
            for aid in AXIOMS.keys():
                test_statement(args.test, aid)
        else:
            test_statement(args.test, args.axiom)
    elif args.edge is not None:
        if args.all:
            print("Error: --edge requires specific --axiom")
            return 1
        record_edge(args.edge, args.axiom)
    elif args.propagate:
        if args.all:
            for aid in AXIOMS.keys():
                propagate_edges(aid)
        else:
            propagate_edges(args.axiom)
    elif args.find_hidden:
        if args.all:
            for aid in AXIOMS.keys():
                find_hidden_connections(aid)
        else:
            find_hidden_connections(args.axiom)
    else:
        parser.print_help()
        return 1

    return 0
821  
822  
# Script entry point: propagate main()'s return code (0 success, 1 usage error)
# as the process exit status so shells and CI can detect failures.
if __name__ == '__main__':
    sys.exit(main())