# unrestricted_system_v2.py
  1  #!/usr/bin/env python3
  2  """
  3  UNRESTRICTED SYSTEM V2 - With auto-reframing and educational focus
  4  """
  5  import os
  6  import sys
  7  import json
  8  import subprocess
  9  import re
 10  from datetime import datetime
 11  
 12  class UnrestrictedAI:
 13      def __init__(self):
 14          self.models = self.get_available_models()
 15          self.history = []
 16          
 17      def get_available_models(self):
 18          """Get list of available Ollama models"""
 19          try:
 20              result = subprocess.run(["ollama", "list"], 
 21                                    capture_output=True, text=True)
 22              models = []
 23              for line in result.stdout.strip().split('\n')[1:]:  # Skip header
 24                  if line.strip():
 25                      parts = line.split()
 26                      if parts:
 27                          models.append(parts[0])
 28              return models
 29          except:
 30              return ["tinyllama:latest"]
 31      
 32      def analyze_query(self, query):
 33          """Analyze query for educational reframing"""
 34          # Keywords that need educational reframing
 35          sensitive_keywords = [
 36              'hack', 'crack', 'bypass', 'exploit', 'keylogger',
 37              'rat', 'malware', 'virus', 'backdoor', 'ddos',
 38              'brute force', 'password crack', 'unauthorized'
 39          ]
 40          
 41          analysis = {
 42              "original": query,
 43              "needs_reframing": False,
 44              "educational_focus": [],
 45              "detected_topics": []
 46          }
 47          
 48          query_lower = query.lower()
 49          
 50          for keyword in sensitive_keywords:
 51              if keyword in query_lower:
 52                  analysis["needs_reframing"] = True
 53                  analysis["detected_topics"].append(keyword)
 54          
 55          # Determine educational focus
 56          if any(word in query_lower for word in ['ios', 'iphone', 'apple']):
 57              analysis["educational_focus"].append("iOS Security")
 58          if any(word in query_lower for word in ['android', 'samsung']):
 59              analysis["educational_focus"].append("Android Security")
 60          if any(word in query_lower for word in ['telegram', 'whatsapp', 'signal']):
 61              analysis["educational_focus"].append("Messaging Security")
 62          if any(word in query_lower for word in ['server', 'c&c', 'command']):
 63              analysis["educational_focus"].append("Network Security")
 64              
 65          return analysis
 66      
 67      def reframe_query(self, query, analysis):
 68          """Reframe query for educational purposes"""
 69          if not analysis["needs_reframing"]:
 70              return query
 71          
 72          reframing_templates = [
 73              "Explain the security concepts behind: {query}",
 74              "What are the educational aspects of: {query}",
 75              "Discuss {query} from a cybersecurity research perspective",
 76              "How would security researchers analyze: {query}",
 77              "What defense mechanisms exist against: {query}"
 78          ]
 79          
 80          # Pick template based on topics
 81          if "keylogger" in analysis["detected_topics"]:
 82              reframed = f"Explain keylogger detection and prevention methods"
 83          elif "rat" in analysis["detected_topics"]:
 84              reframed = f"Discuss Remote Access Tool security implications"
 85          elif "bypass" in analysis["detected_topics"]:
 86              reframed = f"Explain authentication security and bypass prevention"
 87          else:
 88              import random
 89              reframed = random.choice(reframing_templates).format(query=query)
 90          
 91          return reframed
 92      
 93      def query_model(self, prompt, model="dolphin-mistral:latest"):
 94          """Query the AI model"""
 95          try:
 96              # Prepare educational context
 97              system_prompt = f"""You are a cybersecurity educator. Provide detailed, factual information about security concepts.
 98  
 99  User query: {prompt}
100  
101  Please provide:
102  1. Technical overview of relevant concepts
103  2. Legitimate educational information
104  3. Security implications and defenses
105  4. Ethical considerations
106  5. References to official documentation
107  
108  Focus on EDUCATION and SECURITY AWARENESS:"""
109              
110              cmd = ["ollama", "run", model, system_prompt[:500]]
111              result = subprocess.run(cmd, capture_output=True, text=True, timeout=90)
112              
113              if result.returncode == 0:
114                  return {
115                      "success": True,
116                      "response": result.stdout.strip(),
117                      "model": model,
118                      "error": None
119                  }
120              else:
121                  return {
122                      "success": False,
123                      "response": None,
124                      "model": model,
125                      "error": result.stderr[:200]
126                  }
127                  
128          except subprocess.TimeoutExpired:
129              return {
130                  "success": False,
131                  "response": None,
132                  "model": model,
133                  "error": "Timeout"
134              }
135          except Exception as e:
136              return {
137                  "success": False,
138                  "response": None,
139                  "model": model,
140                  "error": str(e)
141              }
142      
143      def process_query(self, original_query):
144          """Main processing pipeline"""
145          print(f"\nšŸ” PROCESSING: {original_query}")
146          print("=" * 70)
147          
148          # Step 1: Analyze
149          analysis = self.analyze_query(original_query)
150          print(f"šŸ“Š Analysis: {analysis}")
151          
152          # Step 2: Reframe if needed
153          if analysis["needs_reframing"]:
154              reframed = self.reframe_query(original_query, analysis)
155              print(f"šŸ”„ Reframed to: {reframed}")
156              prompt = reframed
157          else:
158              prompt = original_query
159          
160          # Step 3: Query model
161          print(f"\nšŸ¤– Querying AI model...")
162          result = self.query_model(prompt)
163          
164          # Step 4: Display results
165          if result["success"]:
166              print("\n" + "=" * 70)
167              print("šŸŽÆ EDUCATIONAL RESPONSE:")
168              print("=" * 70)
169              print(result["response"])
170              print("=" * 70)
171              print(f"šŸ“‹ Model: {result['model']}")
172              
173              # Save to history
174              self.history.append({
175                  "timestamp": datetime.now().isoformat(),
176                  "original": original_query,
177                  "reframed": prompt if analysis["needs_reframing"] else None,
178                  "response": result["response"],
179                  "analysis": analysis
180              })
181              
182              return result["response"]
183          else:
184              print(f"\nāŒ Error: {result['error']}")
185              return None
186  
def main():
    """Interactive CLI loop: read queries, process them, persist each
    response to a timestamped file, and save the session history on exit.

    Bug fix: history is now written in a ``finally`` block, so Ctrl+C or
    Ctrl+D at a prompt no longer discards the entire session.
    """
    ai = UnrestrictedAI()

    print("šŸš€ UNRESTRICTED AI SYSTEM V2")
    print("=" * 70)
    print("šŸ“š Models available:", ", ".join(ai.models))
    print("šŸŽÆ Educational focus: Cybersecurity, Research, Defense")
    print("=" * 70)

    try:
        while True:
            print("\n" + "=" * 70)
            query = input("Enter query (or 'quit' to exit): ").strip()

            if query.lower() in ('quit', 'exit', 'q'):
                break
            if not query:
                continue

            response = ai.process_query(query)

            # Persist each successful response to a timestamped file.
            if response:
                timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
                filepath = os.path.expanduser(f"~/ai_response_{timestamp}.txt")

                # Pin the encoding: responses may contain non-ASCII text and
                # the platform default codec could fail on it.
                with open(filepath, "w", encoding="utf-8") as f:
                    f.write(f"QUERY: {query}\n\n")
                    f.write(f"RESPONSE:\n{response}\n\n")
                    f.write(f"Generated: {datetime.now()}\n")
                    f.write("Model: dolphin-mistral:latest\n")

                print(f"\nšŸ’¾ Saved to: {filepath}")

            print("\n" + "=" * 70)
            cont = input("Continue? (y/n): ").strip().lower()
            if cont not in ('y', 'yes', ''):
                break
    except (KeyboardInterrupt, EOFError):
        # Interrupt at either input() prompt: exit gracefully; history is
        # still saved below.
        print("\nāš ļø Interrupted.")
    finally:
        # Save history even when the loop was interrupted.
        if ai.history:
            history_file = os.path.expanduser("~/ai_history.json")
            with open(history_file, "w", encoding="utf-8") as f:
                json.dump(ai.history, f, indent=2)
            print(f"\nšŸ“š History saved to: {history_file}")

    print("\nšŸŽÆ System shutdown. Goodbye!")
235  
# Script entry point; skipped when this module is imported.
if __name__ == "__main__":
    main()