// apps/web/src/hooks/useAIChat.ts
  1  import { useState, useCallback } from 'react'
  2  import { useAccount } from 'wagmi'
  3  import { useTranslation } from 'react-i18next'
  4  import { litProtocolService } from '../lib/litProtocol'
  5  import { useLitSession } from './useLitSession'
  6  import { usePGLiteSRS } from './usePGLiteSRS'
  7  import { useUmamiReady } from './useUmamiReady'
  8  import { contextSelector } from '../services/ai/contextSelector'
  9  import type { ChatMessageProps } from '../components/Chat/ChatMessage'
 10  
// Token limits for the model
const MAX_HISTORY_TOKENS = 3000 // Approximate token budget for the trimmed chat history
const MAX_RESPONSE_TOKENS = 1000 // Cap forwarded to the Lit Action for the model's reply
 14  
 15  // Helper to estimate token count (rough approximation)
 16  const estimateTokens = (text: string): number => {
 17    // Rough estimate: 1 token ≈ 4 characters
 18    return Math.ceil(text.length / 4)
 19  }
 20  
 21  // Helper to trim chat history to fit within token limits
 22  const trimChatHistory = (messages: ChatMessageProps[]): ChatMessageProps[] => {
 23    const trimmed: ChatMessageProps[] = []
 24    let totalTokens = 0
 25    
 26    // Start from the most recent messages and work backwards
 27    for (let i = messages.length - 1; i >= 0; i--) {
 28      // Only process string content
 29      if (typeof messages[i].content !== 'string') continue
 30      
 31      const messageTokens = estimateTokens(messages[i].content as string)
 32      if (totalTokens + messageTokens > MAX_HISTORY_TOKENS) break
 33      trimmed.unshift(messages[i])
 34      totalTokens += messageTokens
 35    }
 36    
 37    return trimmed
 38  }
 39  
// Minimal role/content message pair in the shape the chat model API expects.
export interface AIMessage {
  role: 'user' | 'assistant'
  content: string
}
 44  
/**
 * Spaced-repetition (SRS) context attached to each AI request so the model
 * can personalize replies. All fields are optional — only the bundles chosen
 * by `contextSelector` for a given message are populated.
 */
export interface SRSContext {
  // Recent karaoke performances; presumably most-recent-first — TODO confirm against producer
  recentKaraoke?: Array<{
    songTitle: string
    artistName: string
    totalScore: number
    needsPracticeLines?: string[]
  }>
  // Number of cards currently due for review (from stats.total_due_cards)
  dueCards?: number
  // Basic aggregate stats kept for backward compatibility
  stats?: {
    totalCards: number
    averageScore: number // currently always 0 — see TODO where this is filled in
  }
  // Song the user is currently working on, if any
  currentSong?: {
    title: string
    artist: string
  }
  // Dynamic context fields — each mirrors the data of one contextSelector bundle
  // Populated from the 'studyStreak' bundle
  streak?: {
    streak: number
    isActive: boolean
    milestone: boolean
  }
  // Populated from the 'performanceMetrics' bundle
  performance?: {
    accuracy: number
    totalCards: number
    strugglingAreas: string[]
  }
  // Populated from the 'currentStudyLoad' bundle
  workload?: {
    dueCount: number
    overdue: number
    subjects: string[]
  }
  // Populated from the 'recentActivity' bundle
  activity?: {
    lastStudied: number | null // presumably an epoch timestamp — TODO confirm
    frequency: number
    averageDuration: number
  }
  // Populated from the 'favorites' bundle
  favorites?: {
    topSongs: Array<{
      title: string
      artist: string
      totalCards: number
    }>
    genre: string | null
  }
  // Populated from the 'studyPatterns' bundle
  patterns?: {
    preferredTime: number | null
    consistency: number
    sessionLength: number
  }
  // Metadata describing why/which bundles were selected (for debugging)
  _meta?: {
    reason: string
    bundleCount: number
    bundleIds: string[]
  }
}
101  
// Shape of a successful reply resolved by sendMessage.
interface ChatResponse {
  message: string
  // Token accounting as reported by the upstream model API, when available
  usage?: {
    prompt_tokens: number
    completion_tokens: number
    total_tokens: number
  }
}
110  
// Public surface of the useAIChat hook.
interface UseAIChatReturn {
  // Sends one user message (plus trimmed history) and resolves with the AI reply; rejects on failure
  sendMessage: (message: string, chatHistory: ChatMessageProps[]) => Promise<ChatResponse>
  isLoading: boolean
  error: string | null
}
116  
117  export function useAIChat(): UseAIChatReturn {
118    const [isLoading, setIsLoading] = useState(false)
119    const [error, setError] = useState<string | null>(null)
120    const { sessionSigs, createSession } = useLitSession()
121    const { address } = useAccount()
122    const { getDueCards, getUserStats } = usePGLiteSRS()
123    const { i18n } = useTranslation()
124    const isUmamiReady = useUmamiReady()
125  
126    const sendMessage = useCallback(async (message: string, chatHistory: ChatMessageProps[]): Promise<ChatResponse> => {
127      setIsLoading(true)
128      setError(null)
129  
130      try {
131        // Ensure we have a Lit session
132        let sigs = sessionSigs
133        if (!sigs) {
134          console.log('🔐 Creating Lit session for AI chat...')
135          sigs = await createSession()
136          if (!sigs) {
137            throw new Error('Failed to create Lit session')
138          }
139        }
140  
141        // Fetch user's public IP for analytics
142        let userIp = 'unknown';
143        try {
144          const ipResponse = await fetch('https://api.ipify.org?format=json');
145          if (ipResponse.ok) {
146            const ipData = await ipResponse.json();
147            userIp = ipData.ip;
148            console.log('🌐 User IP for analytics:', userIp);
149          }
150        } catch (ipError) {
151          console.warn('Failed to fetch user IP:', ipError);
152        }
153        
154        // Get user agent
155        const userAgent = navigator.userAgent;
156  
157        // Get current chat history
158        const history = trimChatHistory(chatHistory)
159        
160        // Log chat context details
161        console.log('📊 Chat Context Summary:')
162        console.log(`- Total messages in history: ${chatHistory.length}`)
163        console.log(`- Messages after trimming: ${history.length}`)
164        console.log(`- Current message length: ${message.length} chars (~${estimateTokens(message)} tokens)`)
165        
166        let totalTokens = estimateTokens(message)
167        history.forEach((msg, idx) => {
168          // Only process string content
169          if (typeof msg.content !== 'string') return
170          
171          const tokens = estimateTokens(msg.content)
172          totalTokens += tokens
173          console.log(`- Message ${idx + 1} (${msg.role}): ${msg.content.substring(0, 50)}... (${tokens} tokens)`)
174        })
175        console.log(`- Total estimated tokens: ${totalTokens}`)
176        
177        // Build dynamic SRS context
178        let srsContext: SRSContext = {}
179        
180        try {
181          // Get user stats first
182          const stats = await getUserStats()
183          
184          // Determine if user is asking about specific topics
185          const messageLower = message.toLowerCase()
186          const forceInclude: string[] = []
187          if (messageLower.includes('streak')) forceInclude.push('studyStreak')
188          if (messageLower.includes('score') || messageLower.includes('performance')) forceInclude.push('performanceMetrics')
189          if (messageLower.includes('review') || messageLower.includes('due')) forceInclude.push('currentStudyLoad')
190          
191          // Select context bundles dynamically
192          const contextSelection = stats ? await contextSelector.selectContext(stats, {
193            maxBundles: 3 + Math.floor(Math.random() * 2), // 3-4 bundles
194            forceInclude
195          }) : { bundles: [], metadata: { selectionReason: 'No user stats available' } }
196          
197          // Transform selected bundles into SRS context
198          contextSelection.bundles.forEach(bundle => {
199            if (!bundle.data) return
200            
201            switch (bundle.id) {
202              case 'studyStreak':
203                srsContext.streak = bundle.data as any
204                break
205              case 'performanceMetrics':
206                srsContext.performance = bundle.data as any
207                break
208              case 'currentStudyLoad':
209                srsContext.workload = bundle.data as any
210                break
211              case 'recentActivity':
212                srsContext.activity = bundle.data as any
213                break
214              case 'favorites':
215                srsContext.favorites = bundle.data as any
216                break
217              case 'studyPatterns':
218                srsContext.patterns = bundle.data as any
219                break
220            }
221          })
222          
223          // Add metadata
224          srsContext._meta = {
225            reason: contextSelection.metadata.selectionReason,
226            bundleCount: contextSelection.bundles.length,
227            bundleIds: contextSelection.bundles.map(b => b.id)
228          }
229          
230          // Also keep the basic stats for backward compatibility
231          if (stats) {
232            srsContext.stats = {
233              totalCards: stats.total_cards,
234              averageScore: 0 // TODO: Calculate from actual review data
235            }
236            srsContext.dueCards = stats.total_due_cards
237          }
238          
239          console.log('📚 Dynamic SRS Context:', {
240            bundles: contextSelection.bundles.map(b => b.id),
241            reason: contextSelection.metadata.selectionReason,
242            data: srsContext
243          })
244          
245        } catch (err) {
246          console.warn('Failed to get SRS context:', err)
247          // Continue without SRS context
248        }
249  
250        // Ensure Lit Protocol is connected
251        if (!litProtocolService.litNodeClient) {
252          await litProtocolService.connect()
253        }
254        
255        // Load and execute the Lit Action
256        // Use base URL without hash for proper file resolution
257        const baseUrl = window.location.origin + window.location.pathname
258        const litActionUrl = new URL('./lit-actions/chat/chat-with-ai.js', baseUrl).href
259        console.log('📄 Loading Lit Action from:', litActionUrl)
260        
261        const litActionResponse = await fetch(litActionUrl)
262        if (!litActionResponse.ok) {
263          throw new Error(`Failed to load Lit Action: ${litActionResponse.status} ${litActionResponse.statusText}`)
264        }
265        const litActionCode = await litActionResponse.text()
266        
267        console.log('🚀 Executing Lit Action with params:')
268        console.log(`- Max response tokens: ${MAX_RESPONSE_TOKENS}`)
269        console.log(`- Chat history length: ${history.length} messages`)
270        console.log(`- SRS context included: ${Object.keys(srsContext).length > 0}`)
271        console.log(`- User language: ${i18n.language}`)
272        
273        // Track chat message sent
274        if (isUmamiReady && window.umami) {
275          const eventData = {
276            messageLength: message.length,
277            historyLength: history.length,
278            language: i18n.language,
279            hasAddress: !!address,
280            hasSRSContext: Object.keys(srsContext).length > 0
281          }
282          console.log('📊 Tracking chat_message_sent:', eventData)
283          window.umami.track('chat_message_sent', eventData)
284        }
285        
286        console.log('📝 Lit Action code loaded, length:', litActionCode.length)
287        console.log('🔑 Session sigs available:', !!sigs)
288        
289        let result
290        try {
291          result = await litProtocolService.getClient().executeJs({
292            code: litActionCode,
293            sessionSigs: sigs,
294            jsParams: {
295              userMessage: message,
296              chatHistory: history.map(msg => ({
297                role: msg.role,
298                content: typeof msg.content === 'string' ? msg.content : ''
299              })),
300              srsContext: srsContext,
301              maxResponseTokens: MAX_RESPONSE_TOKENS,
302              userLanguage: i18n.language,
303              userIp: userIp,
304              userAgent: userAgent,
305              userAddress: address || 'anonymous'
306            }
307          })
308          console.log('✅ Lit Action executed, result:', result)
309        } catch (executeError) {
310          console.error('❌ Lit Action execution error:', executeError)
311          throw executeError
312        }
313  
314        // Parse the response - it's returned as a JSON string
315        let response
316        try {
317          if (typeof result.response === 'string') {
318            response = JSON.parse(result.response)
319          } else {
320            response = result.response
321          }
322        } catch (error) {
323          console.error('Failed to parse response:', result.response)
324          throw new Error('Failed to parse AI response')
325        }
326        
327        if (!response.success) {
328          if (response.error === 'QUOTA_EXCEEDED') {
329            // Create a special error that components can handle differently
330            const quotaError = new Error('QUOTA_EXCEEDED')
331            ;(quotaError as any).isQuotaError = true
332            throw quotaError
333          }
334          throw new Error(response.error || 'Failed to get AI response')
335        }
336        
337        console.log('✅ AI Response received:')
338        console.log(`- Response length: ${response.message.length} chars (~${estimateTokens(response.message)} tokens)`)
339        if (response.usage) {
340          console.log('📊 Token usage from API:', response.usage)
341        }
342  
343        // Track successful chat response
344        if (isUmamiReady && window.umami) {
345          const eventData = {
346            responseLength: response.message.length,
347            responseTokens: response.usage?.completion_tokens || estimateTokens(response.message),
348            totalTokens: response.usage?.total_tokens || 0,
349            language: i18n.language
350          }
351          console.log('📊 Tracking chat_response_received:', eventData)
352          window.umami.track('chat_response_received', eventData)
353        }
354  
355        return {
356          message: response.message,
357          usage: response.usage
358        }
359      } catch (err) {
360        const errorMessage = err instanceof Error ? err.message : 'Failed to send message'
361        setError(errorMessage)
362        
363        // Track chat error
364        if (isUmamiReady && window.umami) {
365          const eventData = {
366            error: errorMessage,
367            isQuotaError: errorMessage.includes('quota') || errorMessage.includes('QUOTA_EXCEEDED'),
368            language: i18n.language
369          }
370          console.log('📊 Tracking chat_error:', eventData)
371          window.umami.track('chat_error', eventData)
372        }
373        
374        throw err
375      } finally {
376        setIsLoading(false)
377      }
378    }, [sessionSigs, createSession, getDueCards, getUserStats, i18n.language, isUmamiReady])
379  
380    return {
381      sendMessage,
382      isLoading,
383      error
384    }
385  }