# restai/chat.py
 1  from datetime import datetime
 2  from uuid import uuid4
 3  from llama_index.core.memory import ChatSummaryMemoryBuffer
 4  from llama_index.core.storage.chat_store import BaseChatStore
 5  from restai.models.models import ChatModel
 6  
# Fraction of the model context window available for prompt/history;
# the remaining 25% is reserved for the generated response.
# NOTE(review): not referenced in this file's visible code — presumably
# used by callers when computing `token_limit`; confirm before removing.
CONTEXT_WINDOW_RATIO = 0.75  # Reserve 25% of context window for response
 8  
 9  
class Chat:
    """Conversation session backed by a summarizing chat memory buffer.

    Wraps a ``ChatSummaryMemoryBuffer`` keyed by ``memory_<chat_id>`` so the
    conversation history persists in the provided chat store and can be
    resumed later by id.
    """

    def __init__(self, model: ChatModel, chat_store: BaseChatStore, token_limit: int = 3900, llm=None):
        """Create (or resume) a chat session.

        Args:
            model: Chat configuration; ``model.id`` resumes an existing
                session, otherwise a fresh UUID is generated.
            chat_store: Persistence backend for the memory buffer.
            token_limit: Max tokens of history retained before the buffer
                starts summarizing.
            llm: LLM the buffer uses to summarize overflowing history.
        """
        self.model: ChatModel = model

        # Reuse the caller-supplied id so memory is reloaded from the store;
        # otherwise start a brand-new session under a random UUID.
        self.chat_id = model.id if model.id else str(uuid4())

        self.memory = ChatSummaryMemoryBuffer.from_defaults(
            token_limit=token_limit,
            llm=llm,
            chat_store=chat_store,
            chat_store_key=f"memory_{self.chat_id}",
        )

        # NOTE(review): naive local time — confirm whether UTC is expected
        # by consumers of `created` before changing.
        self.created: datetime = datetime.now()

    def clear_history(self):
        """Drop all stored messages and any accumulated summary."""
        self.memory.reset()

    def __repr__(self) -> str:
        return f"{type(self).__name__}(chat_id={self.chat_id!r})"

    def __eq__(self, other: object) -> bool:
        # Fix: the original assumed `other` was a Chat and raised
        # AttributeError when compared against arbitrary objects; returning
        # NotImplemented lets Python fall back to the other operand.
        if not isinstance(other, Chat):
            return NotImplemented
        return self.chat_id == other.chat_id

    def __hash__(self) -> int:
        # Fix: defining __eq__ alone made instances unhashable
        # (__hash__ is implicitly set to None). Hash consistently with
        # equality: same chat_id -> same hash.
        return hash(self.chat_id)