// openrouter.go
//
// OpenRouter chat-completions client used by the cellular automata system
// to generate thoughts, evaluate dimensional state, and compress memories.
package api

import (
	"bytes"
	"creature/src/models"
	"encoding/json"
	"fmt"
	"io/ioutil" // NOTE(review): deprecated since Go 1.16; io.ReadAll is the modern replacement.
	"math"
	"net/http"
	"strings"
	"time"
)

// Client-wide defaults.
// NOTE(review): ALL_CAPS names are not idiomatic Go (MixedCaps is preferred),
// but these identifiers are exported and may be referenced elsewhere in the
// project, so they are left unchanged here.
const (
	// OPENROUTER_API_URL is the chat-completions endpoint all requests go to.
	OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"
	DEFAULT_MODEL      = "anthropic/claude-3-haiku:beta" // Using a more reliable model
	DEFAULT_TIMEOUT    = 120 * time.Second               // Increased timeout
	DEBUG_MODE         = false                           // Disable debug mode for cleaner logging
)

// OpenRouterClient represents a client for the OpenRouter API.
type OpenRouterClient struct {
	APIKey     string       // bearer token sent in the Authorization header
	HTTPClient *http.Client // shared client, pre-configured with DEFAULT_TIMEOUT
	Model      string       // model identifier sent with every request
}

// NewOpenRouterClient creates a new OpenRouter API client.
// The error result is always nil in the current implementation; the signature
// leaves room for future validation (e.g. rejecting an empty API key).
func NewOpenRouterClient(apiKey string) (*OpenRouterClient, error) {
	return &OpenRouterClient{
		APIKey: apiKey,
		HTTPClient: &http.Client{
			Timeout: DEFAULT_TIMEOUT,
		},
		Model: DEFAULT_MODEL,
	}, nil
}

// OpenRouterRequest represents a request to the OpenRouter API.
type OpenRouterRequest struct {
	Model    string                  `json:"model"`
	Messages []OpenRouterChatMessage `json:"messages"`
}

// OpenRouterChatMessage represents a message in a chat with the OpenRouter API.
type OpenRouterChatMessage struct {
	Role    string `json:"role"` // this file only sends "system" or "user"
	Content string `json:"content"`
}

// OpenRouterResponse represents a response from the OpenRouter API
// (only the fields this client reads).
type OpenRouterResponse struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Created int64  `json:"created"`
	Model   string `json:"model"`
	Choices []struct {
		Index   int `json:"index"`
		Message struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		} `json:"message"`
	} `json:"choices"`
}

// GatherRealTimeContext gathers real-time context from the OpenRouter API.
68 func (c *OpenRouterClient) GatherRealTimeContext(thoughts []string) (models.RealTimeContext, error) { 69 if len(thoughts) == 0 { 70 return models.DefaultRealTimeContext(), nil 71 } 72 73 // Prepare the prompt 74 thoughtsText := strings.Join(thoughts, "\n\n") 75 prompt := fmt.Sprintf(` 76 You are analyzing a set of thoughts from an AI cell in a cellular automata system. 77 Based on these thoughts, extract key contextual information that would be relevant for the cell's next thought. 78 79 Thoughts: 80 %s 81 82 Please provide a JSON response with the following structure: 83 { 84 "currentTrends": ["trend1", "trend2", "trend3"], 85 "emergingPatterns": ["pattern1", "pattern2"], 86 "criticalInsights": ["insight1", "insight2"], 87 "timestamp": "YYYY-MM-DD HH:MM:SS" 88 } 89 `, thoughtsText) 90 91 // Make the API call 92 response, err := c.callOpenRouter("system", prompt) 93 if err != nil { 94 return models.DefaultRealTimeContext(), err 95 } 96 97 // Parse the response into a temporary struct 98 type TempContext struct { 99 CurrentTrends []string `json:"currentTrends"` 100 EmergingPatterns []string `json:"emergingPatterns"` 101 CriticalInsights []string `json:"criticalInsights"` 102 Timestamp string `json:"timestamp"` 103 } 104 105 var tempContext TempContext 106 err = json.Unmarshal([]byte(response), &tempContext) 107 if err != nil { 108 // If parsing fails, try to extract JSON from the response 109 jsonStart := strings.Index(response, "{") 110 jsonEnd := strings.LastIndex(response, "}") 111 if jsonStart >= 0 && jsonEnd > jsonStart { 112 jsonStr := response[jsonStart : jsonEnd+1] 113 err = json.Unmarshal([]byte(jsonStr), &tempContext) 114 if err != nil { 115 return models.DefaultRealTimeContext(), err 116 } 117 } else { 118 return models.DefaultRealTimeContext(), err 119 } 120 } 121 122 // Create a RealTimeContext from the temporary struct 123 rtContext := models.DefaultRealTimeContext() 124 rtContext.Timestamp = time.Now() // Always use current time 125 126 // Map the 
fields from the temporary struct to the RealTimeContext 127 rtContext.MarketTrends = tempContext.CurrentTrends 128 rtContext.CurrentEvents = tempContext.EmergingPatterns 129 rtContext.TechnologicalDevelopments = tempContext.CriticalInsights 130 131 return rtContext, nil 132 } 133 134 // EvaluateDimensionalState evaluates the dimensional state of a cell. 135 func (c *OpenRouterClient) EvaluateDimensionalState(position models.DimensionalPosition, thoughts []models.Thought, plans []models.Plan) (float64, float64, error) { 136 // If in debug mode and there are no thoughts, use fallback values to avoid API calls 137 if DEBUG_MODE && len(thoughts) == 0 { 138 fmt.Println("Using fallback values for dimensional state evaluation (no thoughts)") 139 return 5.0, 0.7, nil 140 } 141 142 // Extract thought contents 143 thoughtContents := make([]string, 0, len(thoughts)) 144 for _, thought := range thoughts { 145 thoughtContents = append(thoughtContents, thought.Content) 146 } 147 148 // Extract plan summaries 149 planSummaries := make([]string, 0, len(plans)) 150 for _, plan := range plans { 151 planSummaries = append(planSummaries, plan.Summary) 152 } 153 154 // Prepare the prompt 155 prompt := fmt.Sprintf(` 156 You are evaluating the dimensional state of an AI cell in a cellular automata system. 157 The cell has the following dimensional position: 158 - Emergence: %.2f 159 - Coherence: %.2f 160 - Resilience: %.2f 161 - Intelligence: %.2f 162 - Efficiency: %.2f 163 - Integration: %.2f 164 165 Recent thoughts: 166 %s 167 168 Recent plans: 169 %s 170 171 Based on this information, evaluate how the cell's energy and dopamine levels should change. 
172 Return a JSON response with the following structure: 173 { 174 "energyImpact": float, // between -10.0 and 10.0 175 "dopamineImpact": float // between 0.0 and 1.0 176 } 177 `, 178 position.Emergence, position.Coherence, position.Resilience, 179 position.Intelligence, position.Efficiency, position.Integration, 180 strings.Join(thoughtContents, "\n\n"), 181 strings.Join(planSummaries, "\n\n")) 182 183 // Make the API call 184 response, err := c.callOpenRouter("system", prompt) 185 if err != nil { 186 if DEBUG_MODE { 187 fmt.Printf("Error in EvaluateDimensionalState: %v\n", err) 188 fmt.Println("Using fallback values for dimensional state evaluation") 189 return 5.0, 0.7, nil 190 } 191 return 0.0, 0.5, err 192 } 193 194 // Parse the response 195 type EvaluationResponse struct { 196 EnergyImpact float64 `json:"energyImpact"` 197 DopamineImpact float64 `json:"dopamineImpact"` 198 } 199 200 var evalResponse EvaluationResponse 201 err = json.Unmarshal([]byte(response), &evalResponse) 202 if err != nil { 203 // If parsing fails, try to extract JSON from the response 204 jsonStart := strings.Index(response, "{") 205 jsonEnd := strings.LastIndex(response, "}") 206 if jsonStart >= 0 && jsonEnd > jsonStart { 207 jsonStr := response[jsonStart : jsonEnd+1] 208 err = json.Unmarshal([]byte(jsonStr), &evalResponse) 209 if err != nil { 210 if DEBUG_MODE { 211 fmt.Printf("Error parsing JSON in EvaluateDimensionalState: %v\n", err) 212 fmt.Println("Using fallback values for dimensional state evaluation") 213 return 5.0, 0.7, nil 214 } 215 return 0.0, 0.5, err 216 } 217 } else { 218 if DEBUG_MODE { 219 fmt.Println("No JSON found in response, using fallback values for dimensional state evaluation") 220 return 5.0, 0.7, nil 221 } 222 return 0.0, 0.5, err 223 } 224 } 225 226 // Validate the response values 227 if evalResponse.EnergyImpact < -10.0 || evalResponse.EnergyImpact > 10.0 { 228 if DEBUG_MODE { 229 fmt.Printf("Invalid energyImpact value: %.2f, clamping to valid range\n", 
evalResponse.EnergyImpact) 230 } 231 evalResponse.EnergyImpact = math.Max(-10.0, math.Min(10.0, evalResponse.EnergyImpact)) 232 } 233 234 if evalResponse.DopamineImpact < 0.0 || evalResponse.DopamineImpact > 1.0 { 235 if DEBUG_MODE { 236 fmt.Printf("Invalid dopamineImpact value: %.2f, clamping to valid range\n", evalResponse.DopamineImpact) 237 } 238 evalResponse.DopamineImpact = math.Max(0.0, math.Min(1.0, evalResponse.DopamineImpact)) 239 } 240 241 return evalResponse.EnergyImpact, evalResponse.DopamineImpact, nil 242 } 243 244 // GenerateContextualThought generates a contextual thought. 245 func (c *OpenRouterClient) GenerateContextualThought(cellContext models.CellContext, realTimeContext models.RealTimeContext, mission string) (string, float64, []string, error) { 246 // If in debug mode, provide a fallback thought for testing 247 if DEBUG_MODE { 248 // Generate a fallback thought based on the mission and dimensional position 249 fallbackThought := fmt.Sprintf( 250 "Exploring emergent intelligence through the lens of %s. 
My current dimensional position suggests a focus on %s and %s.", 251 mission, 252 getDimensionalFocus(cellContext.DimensionalPosition), 253 getActionableFocus(cellContext.DimensionalPosition), 254 ) 255 256 // Check if we should use the fallback 257 if cellContext.CurrentFocus == "" { 258 fmt.Println("Using fallback thought generation (empty focus)") 259 return fallbackThought, 0.85, []string{"Testing", "Fallback", "Emergent Intelligence"}, nil 260 } 261 } 262 263 // Prepare the prompt 264 prompt := fmt.Sprintf(` 265 You are an AI cell in a cellular automata system with the mission: "%s" 266 267 Your current dimensional position: 268 - Emergence: %.2f 269 - Coherence: %.2f 270 - Resilience: %.2f 271 - Intelligence: %.2f 272 - Efficiency: %.2f 273 - Integration: %.2f 274 275 Your current focus: %s 276 277 Real-time context: 278 - Current trends: %s 279 - Emerging patterns: %s 280 - Critical insights: %s 281 - Timestamp: %s 282 283 Generate a thought that reflects your current state and context. The thought should include: 284 1. A main insight or idea 285 2. How this relates to your mission 286 3. 
Dimensional impacts (how this thought affects your dimensional position) 287 288 Format your response as follows: 289 290 <THOUGHT> 291 [Your thought content here] 292 </THOUGHT> 293 294 <DIMENSIONAL_IMPACTS> 295 - EMERGENT_INTELLIGENCE: [score between -100 and 100] 296 - RESOURCE_EFFICIENCY: [score between -100 and 100] 297 - NETWORK_COHERENCE: [score between -100 and 100] 298 - GOAL_ALIGNMENT: [score between -100 and 100] 299 - TEMPORAL_RESILIENCE: [score between -100 and 100] 300 - DIMENSIONAL_INTEGRATION: [score between -100 and 100] 301 </DIMENSIONAL_IMPACTS> 302 303 <FACTORS> 304 - [factor1] 305 - [factor2] 306 - [factor3] 307 </FACTORS> 308 309 <RELEVANCE_SCORE> 310 [score between 0.0 and 1.0] 311 </RELEVANCE_SCORE> 312 `, 313 mission, 314 cellContext.DimensionalPosition.Emergence, 315 cellContext.DimensionalPosition.Coherence, 316 cellContext.DimensionalPosition.Resilience, 317 cellContext.DimensionalPosition.Intelligence, 318 cellContext.DimensionalPosition.Efficiency, 319 cellContext.DimensionalPosition.Integration, 320 cellContext.CurrentFocus, 321 strings.Join(realTimeContext.MarketTrends, ", "), 322 strings.Join(realTimeContext.CurrentEvents, ", "), 323 strings.Join(realTimeContext.TechnologicalDevelopments, ", "), 324 realTimeContext.Timestamp.Format("2006-01-02 15:04:05")) 325 326 // Make the API call 327 response, err := c.callOpenRouter("user", prompt) 328 if err != nil { 329 if DEBUG_MODE { 330 fmt.Printf("Error in GenerateContextualThought: %v\n", err) 331 fmt.Println("Using fallback thought generation") 332 fallbackThought := fmt.Sprintf( 333 "Exploring emergent intelligence through the lens of %s. 
My current dimensional position suggests a focus on %s and %s.", 334 mission, 335 getDimensionalFocus(cellContext.DimensionalPosition), 336 getActionableFocus(cellContext.DimensionalPosition), 337 ) 338 return fallbackThought, 0.85, []string{"Testing", "Fallback", "Emergent Intelligence"}, nil 339 } 340 return "Error generating thought", 0.5, []string{"Error"}, err 341 } 342 343 // Extract the thought content 344 thoughtContent := extractBetween(response, "<THOUGHT>", "</THOUGHT>") 345 if thoughtContent == "" { 346 if DEBUG_MODE { 347 fmt.Println("No thought content found in response, using fallback") 348 fallbackThought := fmt.Sprintf( 349 "Exploring emergent intelligence through the lens of %s. My current dimensional position suggests a focus on %s and %s.", 350 mission, 351 getDimensionalFocus(cellContext.DimensionalPosition), 352 getActionableFocus(cellContext.DimensionalPosition), 353 ) 354 return fallbackThought, 0.85, []string{"Testing", "Fallback", "Emergent Intelligence"}, nil 355 } 356 thoughtContent = "Generated thought with no specific content" 357 } 358 359 // Extract the factors 360 factorsText := extractBetween(response, "<FACTORS>", "</FACTORS>") 361 factors := extractListItems(factorsText) 362 if len(factors) == 0 { 363 factors = []string{"No specific factors identified"} 364 } 365 366 // Extract the relevance score 367 relevanceScoreText := extractBetween(response, "<RELEVANCE_SCORE>", "</RELEVANCE_SCORE>") 368 relevanceScore := 0.8 // Default 369 fmt.Sscanf(relevanceScoreText, "%f", &relevanceScore) 370 371 return thoughtContent, relevanceScore, factors, nil 372 } 373 374 // Helper function to determine dimensional focus based on position 375 func getDimensionalFocus(position models.DimensionalPosition) string { 376 // Find the highest dimensional value 377 max := position.Emergence 378 focus := "emergence" 379 380 if position.Coherence > max { 381 max = position.Coherence 382 focus = "coherence" 383 } 384 if position.Resilience > max { 385 max = 
position.Resilience 386 focus = "resilience" 387 } 388 if position.Intelligence > max { 389 max = position.Intelligence 390 focus = "intelligence" 391 } 392 if position.Efficiency > max { 393 max = position.Efficiency 394 focus = "efficiency" 395 } 396 if position.Integration > max { 397 max = position.Integration 398 focus = "integration" 399 } 400 401 return focus 402 } 403 404 // Helper function to determine actionable focus based on position 405 func getActionableFocus(position models.DimensionalPosition) string { 406 // Find the lowest dimensional value 407 min := position.Emergence 408 focus := "improving emergence" 409 410 if position.Coherence < min { 411 min = position.Coherence 412 focus = "improving coherence" 413 } 414 if position.Resilience < min { 415 min = position.Resilience 416 focus = "improving resilience" 417 } 418 if position.Intelligence < min { 419 min = position.Intelligence 420 focus = "improving intelligence" 421 } 422 if position.Efficiency < min { 423 min = position.Efficiency 424 focus = "improving efficiency" 425 } 426 if position.Integration < min { 427 min = position.Integration 428 focus = "improving integration" 429 } 430 431 return focus 432 } 433 434 // CompressMemories compresses memories. 435 func (c *OpenRouterClient) CompressMemories(thoughts []string) (string, error) { 436 if len(thoughts) == 0 { 437 return "No memories to compress", nil 438 } 439 440 // Prepare the prompt 441 thoughtsText := strings.Join(thoughts, "\n\n") 442 prompt := fmt.Sprintf(` 443 You are compressing the memories of an AI cell in a cellular automata system. 444 Compress the following thoughts into a concise summary that preserves the essential information: 445 446 %s 447 448 Your compressed memory should be no more than 20%% of the original length while preserving key insights and patterns. 
449 `, thoughtsText) 450 451 // Make the API call 452 response, err := c.callOpenRouter("system", prompt) 453 if err != nil { 454 return "Error compressing memories", err 455 } 456 457 return response, nil 458 } 459 460 // callOpenRouter makes a call to the OpenRouter API. 461 func (c *OpenRouterClient) callOpenRouter(role, content string) (string, error) { 462 // Prepare the request 463 request := OpenRouterRequest{ 464 Model: c.Model, 465 Messages: []OpenRouterChatMessage{ 466 { 467 Role: role, 468 Content: content, 469 }, 470 }, 471 } 472 473 // Convert the request to JSON 474 requestBody, err := json.Marshal(request) 475 if err != nil { 476 return "", fmt.Errorf("failed to marshal request: %w", err) 477 } 478 479 // Print debug information if debug mode is enabled 480 if DEBUG_MODE { 481 fmt.Printf("API Request to %s\n", OPENROUTER_API_URL) 482 fmt.Printf("Model: %s\n", c.Model) 483 fmt.Printf("Role: %s\n", role) 484 fmt.Printf("Content length: %d characters\n", len(content)) 485 } 486 487 // Create the HTTP request 488 req, err := http.NewRequest("POST", OPENROUTER_API_URL, bytes.NewBuffer(requestBody)) 489 if err != nil { 490 return "", fmt.Errorf("failed to create request: %w", err) 491 } 492 493 // Set headers 494 req.Header.Set("Content-Type", "application/json") 495 req.Header.Set("Authorization", "Bearer "+c.APIKey) 496 req.Header.Set("HTTP-Referer", "https://creature.ai") 497 req.Header.Set("X-Title", "CREATURE") 498 499 // Make the request 500 startTime := time.Now() 501 resp, err := c.HTTPClient.Do(req) 502 requestDuration := time.Since(startTime) 503 504 if DEBUG_MODE { 505 fmt.Printf("Request duration: %v\n", requestDuration) 506 } 507 508 if err != nil { 509 return "", fmt.Errorf("failed to execute request: %w", err) 510 } 511 defer resp.Body.Close() 512 513 // Read the response 514 body, err := ioutil.ReadAll(resp.Body) 515 if err != nil { 516 return "", fmt.Errorf("failed to read response body: %w", err) 517 } 518 519 // Check for errors 520 if 
resp.StatusCode != http.StatusOK { 521 if DEBUG_MODE { 522 fmt.Printf("API error response: %s\n", string(body)) 523 } 524 return "", fmt.Errorf("API error (status %d): %s", resp.StatusCode, string(body)) 525 } 526 527 // Print debug information if debug mode is enabled 528 if DEBUG_MODE { 529 fmt.Printf("Response status: %d\n", resp.StatusCode) 530 fmt.Printf("Response body length: %d bytes\n", len(body)) 531 } 532 533 // Parse the response 534 var openRouterResponse OpenRouterResponse 535 err = json.Unmarshal(body, &openRouterResponse) 536 if err != nil { 537 if DEBUG_MODE { 538 fmt.Printf("Failed to parse response: %v\n", err) 539 fmt.Printf("Response body: %s\n", string(body)) 540 } 541 return "", fmt.Errorf("failed to parse response: %w", err) 542 } 543 544 // Check if there are any choices 545 if len(openRouterResponse.Choices) == 0 { 546 if DEBUG_MODE { 547 fmt.Printf("No choices in response: %+v\n", openRouterResponse) 548 } 549 550 // Provide a fallback response for testing 551 if DEBUG_MODE { 552 fmt.Println("Using fallback response for testing") 553 if role == "system" && strings.Contains(content, "evaluating the dimensional state") { 554 return `{"energyImpact": 5.0, "dopamineImpact": 0.7}`, nil 555 } 556 } 557 558 return "", fmt.Errorf("no choices in response") 559 } 560 561 // Return the content of the first choice 562 return openRouterResponse.Choices[0].Message.Content, nil 563 } 564 565 // Helper functions 566 567 // extractBetween extracts text between two markers. 568 func extractBetween(text, start, end string) string { 569 startIndex := strings.Index(text, start) 570 if startIndex == -1 { 571 return "" 572 } 573 startIndex += len(start) 574 575 endIndex := strings.Index(text[startIndex:], end) 576 if endIndex == -1 { 577 return "" 578 } 579 580 return strings.TrimSpace(text[startIndex : startIndex+endIndex]) 581 } 582 583 // extractListItems extracts list items from a text. 
func extractListItems(text string) []string {
	// Collect every non-empty bullet ("- item" or "* item"), one per line.
	var items []string
	for _, raw := range strings.Split(text, "\n") {
		trimmed := strings.TrimSpace(raw)
		if !strings.HasPrefix(trimmed, "-") && !strings.HasPrefix(trimmed, "*") {
			continue
		}
		// Strip at most one "-" and then at most one "*" (same order as before),
		// then drop surrounding whitespace.
		entry := strings.TrimPrefix(trimmed, "-")
		entry = strings.TrimPrefix(entry, "*")
		entry = strings.TrimSpace(entry)
		if entry != "" {
			items = append(items, entry)
		}
	}
	return items
}