# test_llm.py
"""Unit tests for ``OllamaProvider.parse_ollama_params``.

Covers: empty/None/whitespace-only input, integer parameters, float
parameters, ``stop`` sequences in several quoting styles, a mix of
parameter types, and trailing-whitespace handling.

NOTE(review): test functions were named ``testparse_...`` (missing the
underscore). Under pytest's default discovery pattern (``test_*``) those
functions are never collected, so the suite could pass while running
nothing. Renamed to ``test_parse_...`` to guarantee collection.
"""
from chat_workflow.llm.providers.ollama import OllamaProvider

# Single shared provider instance; parse_ollama_params appears stateless,
# so reuse across tests is safe.
provider = OllamaProvider()


def test_parse_ollama_params_empty():
    # Empty, None, and whitespace-only input must all yield an empty dict.
    assert provider.parse_ollama_params("") == {}
    assert provider.parse_ollama_params(None) == {}
    assert provider.parse_ollama_params(" ") == {}


def test_parse_ollama_params_numeric():
    # Integer parameters parse to Python ints, keyed by parameter name.
    input_str = """num_ctx 4096
num_gpu 2
num_thread 4
num_predict 128
repeat_last_n 64
top_k 40"""

    expected = {
        "num_ctx": 4096,
        "num_gpu": 2,
        "num_thread": 4,
        "num_predict": 128,
        "repeat_last_n": 64,
        "top_k": 40,
    }
    assert provider.parse_ollama_params(input_str) == expected


def test_parse_ollama_params_float():
    # Float parameters parse to Python floats.
    input_str = """mirostat_eta 0.1
mirostat_tau 5.0
repeat_penalty 1.1
temperature 0.8
tfs_z 1.0
top_p 0.9"""

    expected = {
        "mirostat_eta": 0.1,
        "mirostat_tau": 5.0,
        "repeat_penalty": 1.1,
        "temperature": 0.8,
        "tfs_z": 1.0,
        "top_p": 0.9,
    }
    assert provider.parse_ollama_params(input_str) == expected


def test_parse_ollama_params_stop():
    # Multiple stop sequences accumulate into a list; double quotes,
    # single quotes, and bare tokens are all accepted and unquoted.
    input_str = """stop "[INST]"
stop '[/INST]'
stop </s>
stop "User:"
stop 'Assistant:'"""

    expected = {
        "stop": ["[INST]", "[/INST]", "</s>", "User:", "Assistant:"]
    }
    assert provider.parse_ollama_params(input_str) == expected


def test_parse_ollama_params_mixed():
    # Ints, floats, repeated stop entries, and bare string values
    # (``format json``) can be interleaved freely.
    input_str = """num_ctx 4096
temperature 0.8
stop "[INST]"
stop "[/INST]"
num_gpu 1
mirostat_eta 0.1
format json
seed 42"""

    expected = {
        "num_ctx": 4096,
        "temperature": 0.8,
        "stop": ["[INST]", "[/INST]"],
        "num_gpu": 1,
        "mirostat_eta": 0.1,
        "format": "json",
        "seed": 42,
    }
    assert provider.parse_ollama_params(input_str) == expected


def test_parse_ollama_params_whitespace():
    # A trailing blank line must not produce a spurious entry.
    input_str = """num_ctx 4096
temperature 0.8
stop "[INST]"
"""

    expected = {
        "num_ctx": 4096,
        "temperature": 0.8,
        "stop": ["[INST]"],
    }
    assert provider.parse_ollama_params(input_str) == expected