# src/solace_agent_mesh/common/error_handlers.py
  1  """Centralized error handlers for Solace Agent Mesh."""
  2  
  3  from typing import Tuple
  4  
  5  from litellm.exceptions import (
  6      AuthenticationError,
  7      BadRequestError,
  8      BudgetExceededError,
  9      ContentPolicyViolationError,
 10      ContextWindowExceededError,
 11      APIConnectionError,
 12      InternalServerError,
 13      NotFoundError,
 14      PermissionDeniedError,
 15      RateLimitError,
 16      ServiceUnavailableError,
 17      Timeout,
 18  )
 19  
 20  
# Tuple of all recognized litellm exception types, for use in except clauses
# (and in isinstance checks — see is_llm_exception below). Order of members
# is irrelevant here: both `except` and `isinstance` accept a tuple and match
# any member.
LITELLM_EXCEPTIONS = (
    AuthenticationError,
    BadRequestError,
    BudgetExceededError,
    ContentPolicyViolationError,
    ContextWindowExceededError,
    APIConnectionError,
    InternalServerError,
    NotFoundError,
    PermissionDeniedError,
    RateLimitError,
    ServiceUnavailableError,
    Timeout,
)
 36  
 37  
# User-facing error messages.
# These strings are returned verbatim by _get_user_friendly_error_message /
# get_error_message and shown to end users, so they intentionally avoid
# internal details (stack traces, provider names, raw API responses) and
# instead direct users to retry or contact an administrator.

# Shown when the model's context window is exceeded (see
# _is_context_limit_error for how that condition is detected).
CONTEXT_LIMIT_ERROR_MESSAGE = (
    "The conversation history has become too long for the AI model to process. "
    "This can happen after extended conversations. "
    "To continue, please start a new conversation."
)

# Fallback for BadRequestError instances that are not context-limit or
# content-policy errors.
DEFAULT_BAD_REQUEST_MESSAGE = (
    "The LLM service rejected the request. "
    "Try rephrasing the message. If the problem persists, "
    "contact an administrator."
)

AUTHENTICATION_ERROR_MESSAGE = (
    "The LLM service rejected the authentication credentials. "
    "Contact an administrator to verify the API key or authentication configuration."
)

RATE_LIMIT_ERROR_MESSAGE = (
    "The LLM service rate limit has been exceeded. "
    "Wait a moment and try again. If this persists, "
    "contact an administrator to review rate limits or adjust the plan."
)

SERVICE_UNAVAILABLE_ERROR_MESSAGE = (
    "The LLM service is temporarily unavailable. "
    "Try again in a few minutes. If the problem persists, "
    "contact an administrator to check the service status."
)

API_CONNECTION_ERROR_MESSAGE = (
    "Unable to connect to the LLM service. "
    "This may be due to a network issue or incorrect endpoint configuration. "
    "Contact an administrator to verify the connection settings."
)

TIMEOUT_ERROR_MESSAGE = (
    "The request to the LLM service timed out. "
    "This may be due to high load or a complex request. "
    "Try again. If this persists, contact an administrator."
)

CONTENT_POLICY_VIOLATION_MESSAGE = (
    "The request was blocked by content safety filters. "
    "Rephrase the request and try again."
)

NOT_FOUND_ERROR_MESSAGE = (
    "The configured LLM model was not found. "
    "Contact an administrator to verify the model name and provider configuration."
)

PERMISSION_DENIED_ERROR_MESSAGE = (
    "Access to the LLM model was denied. "
    "Contact an administrator to verify the API permissions and access configuration."
)

INTERNAL_SERVER_ERROR_MESSAGE = (
    "The LLM service encountered an internal error. "
    "Try again. If this persists, contact an administrator."
)

BUDGET_EXCEEDED_ERROR_MESSAGE = (
    "The LLM usage budget has been exceeded. "
    "Contact an administrator to review and adjust the budget limits."
)

# Last-resort fallback for any litellm exception not matched above.
DEFAULT_LLM_ERROR_MESSAGE = (
    "An error occurred while communicating with the LLM service. "
    "Please try again. If the problem persists, contact an administrator."
)
109  
110  
def is_llm_exception(exception: Exception) -> bool:
    """Return True when *exception* is one of the recognized litellm types."""
    # Equivalent to isinstance(exception, LITELLM_EXCEPTIONS): a tuple passed
    # to isinstance matches if any member class matches.
    return any(isinstance(exception, exc_type) for exc_type in LITELLM_EXCEPTIONS)
114  
115  
# Substrings that identify context/token-limit failures which some providers
# report as a generic BadRequestError rather than ContextWindowExceededError.
# Hoisted to module level so the tuple is built once instead of on every call.
_CONTEXT_LIMIT_PATTERNS = (
    "too many tokens",
    "expected maxlength:",
    "input is too long",
    "prompt is too long",
    "prompt: length: 1..",
    "too many input tokens",
)


def _is_context_limit_error(exception: Exception) -> bool:
    """
    Detects if an exception is a context/token limit error from LiteLLM.

    A ``ContextWindowExceededError`` is always treated as a context-limit
    error. Because some providers surface the condition as a plain
    ``BadRequestError``, the error text is also matched (case-insensitively)
    against ``_CONTEXT_LIMIT_PATTERNS`` as a fallback.

    Args:
        exception: The exception to check

    Returns:
        True if the exception indicates a context limit error
    """
    if isinstance(exception, ContextWindowExceededError):
        return True

    # Only BadRequestError is worth string-matching; other exception types
    # (auth, rate limit, timeouts, ...) never indicate a context overflow.
    if not isinstance(exception, BadRequestError):
        return False

    error_str = str(exception).lower()
    return any(pattern in error_str for pattern in _CONTEXT_LIMIT_PATTERNS)
145  
146  
def _get_user_friendly_error_message(exception: Exception) -> str:
    """
    Returns a user-friendly error message for the given exception.

    Args:
        exception: The exception to get a message for

    Returns:
        User-friendly error message string
    """
    # Context-limit errors get a dedicated message regardless of exact type.
    if _is_context_limit_error(exception):
        return CONTEXT_LIMIT_ERROR_MESSAGE

    # Ordered mapping: subclasses (e.g. ContentPolicyViolationError) must
    # appear before their base class (BadRequestError) so they match first.
    message_by_type = (
        (ContentPolicyViolationError, CONTENT_POLICY_VIOLATION_MESSAGE),
        (BadRequestError, DEFAULT_BAD_REQUEST_MESSAGE),
        (AuthenticationError, AUTHENTICATION_ERROR_MESSAGE),
        (RateLimitError, RATE_LIMIT_ERROR_MESSAGE),
        (ServiceUnavailableError, SERVICE_UNAVAILABLE_ERROR_MESSAGE),
        (APIConnectionError, API_CONNECTION_ERROR_MESSAGE),
        (Timeout, TIMEOUT_ERROR_MESSAGE),
        (NotFoundError, NOT_FOUND_ERROR_MESSAGE),
        (PermissionDeniedError, PERMISSION_DENIED_ERROR_MESSAGE),
        (InternalServerError, INTERNAL_SERVER_ERROR_MESSAGE),
        (BudgetExceededError, BUDGET_EXCEEDED_ERROR_MESSAGE),
    )
    for exc_type, message in message_by_type:
        if isinstance(exception, exc_type):
            return message

    # Unrecognized exception type: generic fallback message.
    return DEFAULT_LLM_ERROR_MESSAGE
195  
196  
def get_error_message(
    exception: Exception,
) -> Tuple[str, bool]:
    """
    Handles LLM-related exceptions and returns error information.

    Args:
        exception: The exception to handle

    Returns:
        Tuple of (error_message, is_context_limit_error)
    """
    # Both helpers are pure lookups on the exception, so they can be
    # evaluated directly in the returned tuple.
    return (
        _get_user_friendly_error_message(exception),
        _is_context_limit_error(exception),
    )