# sovereign_bot.py
"""RAM-resident Ollama instance published as a Tor ephemeral hidden service.

Everything (model vault, Tor data directory, captured blobs) lives under
/dev/shm (tmpfs), so no state touches persistent storage.  Flow:
  1. setup_env()       - build the RAM directory tree
  2. force_start_tor() - replace any system Tor with a private instance
  3. start_onion()     - publish 80 -> 127.0.0.1:11438 as a .onion
  4. hunt()            - poll GHOST for model files and serve them via Ollama
"""
import os
import shutil  # NOTE(review): unused in the visible code; kept deliberately
import socket
import subprocess
import time

# 1. CONSTANTS & PATHS (all tmpfs-backed)
VAULT = '/dev/shm/.sov_vault'
GHOST = '/dev/shm/.ghost'
TOR_DATA = '/dev/shm/.tor_sov'
BLOBS = f"{VAULT}/blobs"
MANIFEST = f"{VAULT}/manifests/registry.ollama.ai/library/test"


def setup_env():
    """Create the RAM-backed directory tree used by the vault and Tor."""
    print("[*] Cleaning environment and setting up RAM vaults...")
    for d in [BLOBS, MANIFEST, GHOST, TOR_DATA]:
        os.makedirs(d, exist_ok=True)
    # Tor rejects a DataDirectory that is group/world accessible.
    os.chmod(TOR_DATA, 0o700)


def force_start_tor():
    """Stop any system Tor and launch a private instance with a RAM DataDirectory.

    Returns:
        True once 127.0.0.1:9051 (the control port) accepts a TCP connection,
        False if it is still closed after ~10 seconds of polling.
    """
    print("[*] Nuking system Tor and launching Sovereign Tor...")
    subprocess.run(["sudo", "systemctl", "stop", "tor"], capture_output=True)
    subprocess.run(["sudo", "pkill", "-9", "tor"], capture_output=True)
    # Launch Tor with zero-config and RAM-only data directory.
    # NOTE(review): CookieAuthentication=0 leaves the control port open to any
    # local process — confirm that is acceptable on this host.
    tor_cmd = [
        "sudo", "tor", "--defaults-torrc", "/dev/null",
        "--DataDirectory", TOR_DATA,
        "--ControlPort", "9051",
        "--CookieAuthentication", "0",
        "--runasdaemon", "1",
    ]
    subprocess.Popen(tor_cmd, stdout=subprocess.DEVNULL)

    # Wait for port 9051 to open (up to ~10 s).
    for _ in range(10):
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            if s.connect_ex(('127.0.0.1', 9051)) == 0:
                print("[+] Tor Controller is LIVE.")
                return True
        time.sleep(1)
    return False


def start_onion():
    """Publish an ephemeral hidden service mapping onion port 80 to 127.0.0.1:11438.

    Best-effort: on any failure (stem missing, control port refused, publication
    timeout) it logs and returns, so the caller can continue without the onion.
    """
    try:
        from stem.control import Controller  # third-party; imported lazily
        with Controller.from_port(port=9051) as ctrl:
            ctrl.authenticate()
            svc = ctrl.create_ephemeral_hidden_service(
                {80: 11438}, await_publication=True)
            print(f"\n[!!!] SOVEREIGNTY ESTABLISHED: {svc.service_id}.onion\n")
            with open(f"{VAULT}/address.txt", "w") as f:
                f.write(f"{svc.service_id}.onion")
    except Exception as e:
        print(f"[!] Onion setup failed: {e}")


def hunt():
    """Poll GHOST for files named 'model*' larger than 1 MB and capture them.

    Each capture hard-links the file into BLOBS under a fixed digest name and
    (first time only, guarded by the link's existence check) starts a local
    Ollama server pointed at the vault.  Runs forever.
    """
    print("[*] Hunter is active. Monitoring for shards...")
    target_digest = "sha256-" + "0" * 64
    with open(f"{MANIFEST}/latest", "w") as f:
        f.write('{"schemaVersion":2,"layers":[{"mediaType":"application/vnd.ollama.image.model","digest":"'
                + target_digest + '"}]}')

    while True:
        try:
            # Renamed loop variable (was `f`, shadowing the file handle above).
            for name in os.listdir(GHOST):
                f_path = os.path.join(GHOST, name)
                # Any size > 1MB for testing
                if name.startswith('model') and os.path.getsize(f_path) > 1024**2:
                    dest = f"{BLOBS}/{target_digest}"
                    if not os.path.exists(dest):
                        os.link(f_path, dest)
                        print(f"[!] CAPTURED SHARD: {f_path} -> {dest}")
                        # Launch the local service
                        env = os.environ.copy()
                        env["OLLAMA_HOST"] = "127.0.0.1:11438"
                        env["OLLAMA_MODELS"] = VAULT
                        subprocess.Popen(["/usr/local/bin/ollama", "serve"],
                                         env=env)
        except OSError:
            # FIX: was a bare `except: pass`, which also swallowed
            # KeyboardInterrupt/SystemExit and made the loop unkillable.
            # OSError covers the real race: a file vanishing between
            # listdir()/getsize()/link().
            pass
        time.sleep(0.5)


if __name__ == "__main__":
    setup_env()
    if force_start_tor():
        start_onion()
        # NOTE(review): reconstructed from mangled source — hunt() is assumed
        # to run only when Tor started; confirm against the original intent.
        hunt()