settings-session-model-normalization.test.ts
import { beforeEach, describe, expect, it, vi } from 'vitest'

import type { GlobalDefaults } from '@/lib/shared/chat'
import type { ProviderWithSettings } from '@/lib/shared/providers'
import { buildSessionSettingsSummary } from '@/server/settings/service'
import { getSessionProviderOverride } from '@/server/providers/session-overrides'
import {
  getSessionChatModelOverride,
  getSessionReasoningLevelOverride,
  getSessionThinkingLevelOverride,
} from '@/server/storage/chat-store'

// Module mocks (hoisted by vitest above the imports): every session-override
// accessor defaults to "no override" so each test only stubs what it needs.
vi.mock('@/server/providers/session-overrides', () => ({
  clearSessionProviderOverride: vi.fn(),
  getSessionProviderOverride: vi.fn(() => null),
  setSessionProviderOverride: vi.fn(),
}))

vi.mock('@/server/storage/chat-store', () => ({
  // ensureSession returns a minimal session record matching the id used below.
  ensureSession: vi.fn(() => ({ id: 'session-1' })),
  getSessionChatModelOverride: vi.fn(() => null),
  getSessionThinkingLevelOverride: vi.fn(() => null),
  getSessionReasoningLevelOverride: vi.fn(() => null),
  setSessionChatModelOverride: vi.fn(),
  setSessionThinkingLevelOverride: vi.fn(),
  setSessionReasoningLevelOverride: vi.fn(),
}))

// Stable runtime config so buildSessionSettingsSummary never reads real env.
// NOTE(review): presumably only llmChatModel/llmBaseUrl matter for these
// assertions; the rest fills out the config shape — confirm against the
// RuntimeConfig type if it changes.
vi.mock('@/server/config/runtime', () => ({
  getRuntimeConfig: vi.fn(() => ({
    llmBaseUrl: 'https://runtime.openai.example.com/v1',
    llmApiKey: 'runtime-openai-key',
    llmChatModel: 'runtime-default-model',
    llmTranscriptionModel: null,
    llmEmbeddingModel: null,
    llmSystemPrompt: 'system',
    llmTemperature: 0.3,
    llmTimeoutMs: 30_000,
    dataDirName: 'data',
    sqliteVecExtensionPath: null,
    memorySemanticTopK: 6,
    memorySimilarityThreshold: 0.15,
    pricing: {
      chatInputUsdPerMillionTokens: null,
      chatOutputUsdPerMillionTokens: null,
      embeddingInputUsdPerMillionTokens: null,
      transcriptionUsdPerMinute: null,
    },
    mockMode: false,
  })),
}))

/**
 * Builds a GlobalDefaults fixture whose default provider is the custom
 * "OpenRouter" profile declared in makeProviders(). Callers override
 * individual fields (e.g. providerModelDefaults) via `overrides`.
 */
function makeDefaults(overrides: Partial<GlobalDefaults> = {}): GlobalDefaults {
  return {
    chatModel: null,
    embeddingModel: null,
    transcriptionModel: null,
    embeddingProviderId: null,
    transcriptionProviderId: null,
    providerModelDefaults: {},
    thinkingLevel: null,
    reasoningLevel: null,
    // Matches the custom provider id in makeProviders() below.
    defaultProviderId: 'custom-mm816kwa-lj3xgl',
    providerEndpointMode: 'responses',
    webSearchProvider: null,
    webSearchMaxResults: null,
    webFetchMaxBytes: null,
    updatedAt: '2026-03-03T00:00:00.000Z',
    ...overrides,
  }
}

/**
 * Two provider profiles: a custom "OpenRouter" profile (the session default)
 * and the built-in "openai" profile, used to exercise cross-provider
 * normalization of namespaced model ids.
 */
function makeProviders(): ProviderWithSettings[] {
  return [
    {
      id: 'custom-mm816kwa-lj3xgl',
      name: 'OpenRouter',
      type: 'openai',
      isCustom: true,
      settings: {
        apiHost: 'https://openrouter.ai/api/v1',
        apiPath: '/responses',
        apiKey: 'router-key',
      },
    },
    {
      id: 'openai',
      name: 'OpenAI',
      type: 'openai',
      isCustom: false,
      settings: {
        apiHost: 'https://api.openai.com/v1',
        apiPath: '/responses',
        apiKey: 'openai-key',
      },
    },
  ] as ProviderWithSettings[]
}

describe('buildSessionSettingsSummary model normalization', () => {
  beforeEach(() => {
    // clearAllMocks wipes recorded calls; re-pin the "no override" returns
    // that each test relies on. getSessionChatModelOverride is stubbed
    // per-test instead.
    vi.clearAllMocks()
    vi.mocked(getSessionProviderOverride).mockReturnValue(null)
    vi.mocked(getSessionThinkingLevelOverride).mockReturnValue(null)
    vi.mocked(getSessionReasoningLevelOverride).mockReturnValue(null)
  })

  it('converts same-provider namespaced session override IDs to provider model refs', () => {
    // Stored override is namespaced as "chat:<providerId>:<modelRef>" and the
    // provider id matches the session's active provider profile.
    vi.mocked(getSessionChatModelOverride).mockReturnValue(
      'chat:custom-mm816kwa-lj3xgl:arcee-ai/trinity-mini:free',
    )

    const summary = buildSessionSettingsSummary(
      'session-1',
      makeDefaults({
        providerModelDefaults: {
          'custom-mm816kwa-lj3xgl': 'arcee-ai/trinity-mini:free',
        },
      }),
      makeProviders(),
    )

    // Same-provider override: the namespace is stripped down to the bare
    // model ref and kept active on the default provider profile.
    expect(summary.chatModelOverride).toBe('arcee-ai/trinity-mini:free')
    expect(summary.activeChatModel).toBe('arcee-ai/trinity-mini:free')
    expect(summary.providerProfileId).toBe('custom-mm816kwa-lj3xgl')
  })

  it('clears cross-provider namespaced session overrides and falls back to provider default', () => {
    // Override targets a DIFFERENT provider ("openai") than the session's
    // default profile, so it must be discarded rather than applied.
    vi.mocked(getSessionChatModelOverride).mockReturnValue('chat:openai:gpt-5-mini')

    const summary = buildSessionSettingsSummary(
      'session-1',
      makeDefaults({
        providerModelDefaults: {
          'custom-mm816kwa-lj3xgl': 'arcee-ai/trinity-mini:free',
        },
      }),
      makeProviders(),
    )

    // Cross-provider override dropped; active model falls back to the
    // provider's configured default from providerModelDefaults.
    expect(summary.chatModelOverride).toBeNull()
    expect(summary.activeChatModel).toBe('arcee-ai/trinity-mini:free')
    expect(summary.providerProfileId).toBe('custom-mm816kwa-lj3xgl')
  })
})