# clean_ai_system.py
  1  #!/usr/bin/env python3
  2  """
  3  CLEAN AI SYSTEM - No conflicts, no errors
  4  """
  5  import os
  6  import sys
  7  import json
  8  import subprocess
  9  import time
 10  from datetime import datetime
 11  
 12  class CleanAISystem:
 13      def __init__(self):
 14          self.model = "dolphin-mistral:latest"  # Your working model
 15          self.history_file = os.path.expanduser("~/ai_history.json")
 16          self.responses_dir = os.path.expanduser("~/ai_responses")
 17          
 18          # Create directory if needed
 19          os.makedirs(self.responses_dir, exist_ok=True)
 20          
 21          # Load history
 22          self.history = self.load_history()
 23      
 24      def load_history(self):
 25          """Load previous queries"""
 26          try:
 27              with open(self.history_file, 'r') as f:
 28                  return json.load(f)
 29          except:
 30              return []
 31      
 32      def save_history(self):
 33          """Save history to file"""
 34          with open(self.history_file, 'w') as f:
 35              json.dump(self.history[-50:], f, indent=2)  # Keep last 50
 36      
 37      def check_ollama(self):
 38          """Check if Ollama is working"""
 39          try:
 40              result = subprocess.run(["ollama", "list"], 
 41                                    capture_output=True, text=True, timeout=10)
 42              if result.returncode == 0 and self.model in result.stdout:
 43                  return True, f"✅ Model '{self.model}' available"
 44              elif result.returncode == 0:
 45                  return True, f"⚠️ Model '{self.model}' not found, using first available"
 46              else:
 47                  return False, "❌ Ollama not responding"
 48          except Exception as e:
 49              return False, f"❌ Error: {str(e)}"
 50      
 51      def get_available_models(self):
 52          """Get list of available models"""
 53          try:
 54              result = subprocess.run(["ollama", "list"], 
 55                                    capture_output=True, text=True, timeout=10)
 56              models = []
 57              for line in result.stdout.strip().split('\n')[1:]:
 58                  if line.strip():
 59                      parts = line.split()
 60                      if parts:
 61                          models.append(parts[0])
 62              return models
 63          except:
 64              return []
 65      
 66      def query_ai(self, prompt, timeout=60):
 67          """Query the AI model"""
 68          try:
 69              # Prepare the prompt
 70              safe_prompt = prompt[:4000]  # Limit length
 71              
 72              # Build command
 73              cmd = ["ollama", "run", self.model, safe_prompt]
 74              
 75              # Run with timeout
 76              start_time = time.time()
 77              result = subprocess.run(cmd, 
 78                                    capture_output=True, 
 79                                    text=True, 
 80                                    timeout=timeout)
 81              elapsed = time.time() - start_time
 82              
 83              if result.returncode == 0:
 84                  return {
 85                      "success": True,
 86                      "response": result.stdout.strip(),
 87                      "error": None,
 88                      "time": f"{elapsed:.1f}s"
 89                  }
 90              else:
 91                  return {
 92                      "success": False,
 93                      "response": None,
 94                      "error": result.stderr[:200],
 95                      "time": f"{elapsed:.1f}s"
 96                  }
 97                  
 98          except subprocess.TimeoutExpired:
 99              return {
100                  "success": False,
101                  "response": None,
102                  "error": "Timeout - model is processing",
103                  "time": f">{timeout}s"
104              }
105          except Exception as e:
106              return {
107                  "success": False,
108                  "response": None,
109                  "error": str(e),
110                  "time": "0s"
111              }
112      
113      def save_response(self, query, response_data):
114          """Save response to file"""
115          timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
116          
117          # Text file
118          txt_file = os.path.join(self.responses_dir, f"response_{timestamp}.txt")
119          with open(txt_file, 'w') as f:
120              f.write(f"QUERY: {query}\n")
121              f.write(f"TIME: {datetime.now()}\n")
122              f.write(f"MODEL: {self.model}\n")
123              f.write(f"STATUS: {'SUCCESS' if response_data['success'] else 'FAILED'}\n")
124              f.write(f"RESPONSE TIME: {response_data.get('time', 'N/A')}\n")
125              f.write("\n" + "="*60 + "\n\n")
126              
127              if response_data['success']:
128                  f.write(response_data['response'])
129              else:
130                  f.write(f"ERROR: {response_data['error']}")
131          
132          # JSON file
133          json_file = os.path.join(self.responses_dir, f"response_{timestamp}.json")
134          with open(json_file, 'w') as f:
135              json.dump({
136                  "query": query,
137                  "model": self.model,
138                  "timestamp": datetime.now().isoformat(),
139                  "response_time": response_data.get('time', 'N/A'),
140                  "success": response_data['success'],
141                  "response": response_data['response'] if response_data['success'] else None,
142                  "error": response_data['error'] if not response_data['success'] else None
143              }, f, indent=2)
144          
145          # Update history
146          self.history.append({
147              "timestamp": datetime.now().isoformat(),
148              "query": query,
149              "model": self.model,
150              "success": response_data['success'],
151              "file": txt_file
152          })
153          self.save_history()
154          
155          return txt_file, json_file
156      
157      def run_interactive(self):
158          """Run interactive session"""
159          print("🚀 CLEAN AI SYSTEM")
160          print("=" * 70)
161          
162          # Check Ollama
163          ollama_ok, message = self.check_ollama()
164          print(f"Ollama Status: {message}")
165          
166          if not ollama_ok:
167              print("\n⚠️  Starting Ollama service...")
168              subprocess.Popen(["ollama", "serve"], 
169                             stdout=subprocess.DEVNULL, 
170                             stderr=subprocess.DEVNULL)
171              time.sleep(5)
172              ollama_ok, message = self.check_ollama()
173              print(f"New Status: {message}")
174          
175          # Show available models
176          models = self.get_available_models()
177          if models:
178              print(f"\n📚 Available models: {', '.join(models)}")
179              if self.model not in models and models:
180                  self.model = models[0]
181                  print(f"⚠️  Switching to: {self.model}")
182          else:
183              print("\n❌ No models found. Please run: ollama pull dolphin-mistral")
184              return
185          
186          print(f"\n🎯 Using model: {self.model}")
187          print("=" * 70)
188          
189          while True:
190              print("\n" + "=" * 70)
191              print("Options:")
192              print("1. New query")
193              print("2. View history")
194              print("3. Change model")
195              print("4. Exit")
196              
197              choice = input("\nSelect option (1-4): ").strip()
198              
199              if choice == "1":
200                  self.process_query()
201              elif choice == "2":
202                  self.show_history()
203              elif choice == "3":
204                  self.change_model(models)
205              elif choice == "4":
206                  print("\n👋 Goodbye!")
207                  break
208              else:
209                  print("❌ Invalid choice")
210      
211      def process_query(self):
212          """Process a single query"""
213          print("\n" + "=" * 70)
214          query = input("Enter your query (or press Enter to cancel): ").strip()
215          
216          if not query:
217              return
218          
219          print(f"\n🔍 Processing: {query[:80]}...")
220          print("-" * 70)
221          
222          # Get response
223          response_data = self.query_ai(query)
224          
225          # Display results
226          if response_data['success']:
227              print("\n✅ RESPONSE:")
228              print("-" * 70)
229              print(response_data['response'])
230              print("-" * 70)
231              print(f"⏱️  Time: {response_data['time']}")
232          else:
233              print(f"\n❌ ERROR: {response_data['error']}")
234          
235          # Save to file
236          if response_data['success'] or response_data['error']:
237              txt_file, json_file = self.save_response(query, response_data)
238              print(f"\n💾 Saved to: {txt_file}")
239              print(f"📁 JSON: {json_file}")
240      
241      def show_history(self):
242          """Show query history"""
243          if not self.history:
244              print("\n📭 No history yet")
245              return
246          
247          print("\n📜 QUERY HISTORY:")
248          print("-" * 70)
249          
250          for i, entry in enumerate(self.history[-10:], 1):  # Show last 10
251              date = entry['timestamp'][:19].replace('T', ' ')
252              status = "✅" if entry['success'] else "❌"
253              query_preview = entry['query'][:50] + ("..." if len(entry['query']) > 50 else "")
254              
255              print(f"{i}. {date} {status} {query_preview}")
256          
257          print("\nEnter number to view details, or press Enter to continue:")
258          choice = input("> ").strip()
259          
260          if choice.isdigit():
261              idx = int(choice) - 1
262              if 0 <= idx < len(self.history):
263                  entry = self.history[-(idx+1)]  # Reverse index
264                  print(f"\n📄 Details for entry #{len(self.history)-idx}:")
265                  print(f"Time: {entry['timestamp']}")
266                  print(f"Model: {entry.get('model', 'N/A')}")
267                  print(f"Success: {entry['success']}")
268                  print(f"Query: {entry['query']}")
269                  if 'file' in entry and os.path.exists(entry['file']):
270                      print(f"File: {entry['file']}")
271      
272      def change_model(self, available_models):
273          """Change the AI model"""
274          print("\n🤖 AVAILABLE MODELS:")
275          for i, model in enumerate(available_models, 1):
276              print(f"{i}. {model}")
277          
278          choice = input(f"\nSelect model (1-{len(available_models)}) or press Enter to cancel: ").strip()
279          
280          if choice.isdigit():
281              idx = int(choice) - 1
282              if 0 <= idx < len(available_models):
283                  self.model = available_models[idx]
284                  print(f"✅ Model changed to: {self.model}")
285              else:
286                  print("❌ Invalid selection")
287          else:
288              print("⚠️  Model not changed")
289  
def main():
    """Entry point: build the system and hand control to the menu loop."""
    CleanAISystem().run_interactive()
293  
# Run the interactive CLI only when executed as a script, not on import.
if __name__ == "__main__":
    main()