%%% File: glm_provider_comparison_test.erl
%%%-------------------------------------------------------------------
%%% @doc GLM provider comparison tests.
%%%
%%% Exercises the same Zhipu backend through two configurations:
%%%   1. GLM-4.7 via the Anthropic-compatible provider
%%%      (base_url: https://open.bigmodel.cn/api/anthropic)
%%%   2. GLM-4.6 via the native Zhipu provider
%%%
%%% All tests are skipped unless the ZHIPU_API_KEY environment
%%% variable is set. Run with:
%%%   ZHIPU_API_KEY=your_key rebar3 eunit --module=glm_provider_comparison_test
%%%
%%% @end
%%%-------------------------------------------------------------------
-module(glm_provider_comparison_test).

-include_lib("eunit/include/eunit.hrl").

%% Anthropic-compatible endpoint exposed by Zhipu; shared by every
%% anthropic-provider config below.
-define(ANTHROPIC_BASE_URL, <<"https://open.bigmodel.cn/api/anthropic">>).

%%====================================================================
%% Shared builders
%%
%% The config maps and the tool schema were previously duplicated in
%% every test body; they are extracted here so the two providers stay
%% in sync when a field changes.
%%====================================================================

%% @doc Config for GLM-4.7 behind the Anthropic-compatible endpoint.
%% MaxTokens varies per test; the 60s per-request timeout is fixed.
anthropic_config(ApiKey, MaxTokens) ->
    llm_client:config(anthropic, #{
        api_key => list_to_binary(ApiKey),
        base_url => ?ANTHROPIC_BASE_URL,
        model => <<"glm-4.7">>,
        max_tokens => MaxTokens,
        timeout => 60000
    }).

%% @doc Config for GLM-4.6 via the native Zhipu provider (default base URL).
zhipu_config(ApiKey, MaxTokens) ->
    llm_client:config(zhipu, #{
        api_key => list_to_binary(ApiKey),
        model => <<"glm-4.6">>,
        max_tokens => MaxTokens,
        timeout => 60000
    }).

%% @doc Tool schema used by both tool-call tests: a single
%% get_current_time tool taking a required "city" string argument.
time_tool() ->
    #{
        name => <<"get_current_time">>,
        description => <<"获取指定城市的当前时间"/utf8>>,
        parameters => #{
            type => object,
            properties => #{
                city => #{type => string, description => <<"城市名称"/utf8>>}
            },
            required => [<<"city">>]
        }
    }.

%%====================================================================
%% Test 1: GLM-4.7 + Anthropic provider
%%====================================================================

%% @doc EUnit generator: chat and tool-call smoke tests for GLM-4.7
%% through the Anthropic-compatible provider. Returns an empty test
%% list when ZHIPU_API_KEY is unset; the network calls only happen
%% when EUnit later executes the returned funs.
glm47_anthropic_test_() ->
    case os:getenv("ZHIPU_API_KEY") of
        false ->
            {"跳过 GLM-4.7 Anthropic 测试 (未设置 ZHIPU_API_KEY)", []};
        ApiKey ->
            {"GLM-4.7 via Anthropic Provider", {timeout, 120, [
                {"简单对话", fun() ->
                    io:format("~n=== 测试 GLM-4.7 + Anthropic Provider ===~n"),
                    Config = anthropic_config(ApiKey, 100),
                    %% Redact the API key before logging the config.
                    io:format("Config: ~p~n", [maps:without([api_key], Config)]),

                    Messages = [#{role => user, content => <<"你好,请用一句话介绍自己"/utf8>>}],
                    io:format("发送请求...~n"),

                    Result = llm_client:chat(Config, Messages),
                    io:format("结果: ~p~n", [Result]),

                    ?assertMatch({ok, _}, Result),
                    {ok, Response} = Result,
                    Content = maps:get(content, Response),
                    ?assert(is_binary(Content)),
                    ?assert(byte_size(Content) > 0),
                    io:format("回复内容: ~ts~n", [Content]),
                    io:format("=== GLM-4.7 Anthropic 测试通过 ===~n~n")
                end},

                {"工具调用", fun() ->
                    io:format("~n=== 测试 GLM-4.7 Anthropic 工具调用 ===~n"),
                    Config = anthropic_config(ApiKey, 200),

                    Messages = [#{role => user, content => <<"北京现在几点了?"/utf8>>}],
                    Tools = [time_tool()],

                    Result = llm_client:with_tools(Config, Messages, Tools),
                    io:format("工具调用结果: ~p~n", [Result]),

                    ?assertMatch({ok, _}, Result),
                    {ok, Response} = Result,
                    ToolCalls = maps:get(tool_calls, Response, []),
                    Content = maps:get(content, Response, null),
                    io:format("Tool calls: ~p, Content: ~p~n", [ToolCalls, Content]),
                    %% The model may either call the tool or answer in
                    %% plain text; accept both.
                    ?assert(ToolCalls =/= [] orelse Content =/= null),
                    io:format("=== GLM-4.7 Anthropic 工具调用测试通过 ===~n~n")
                end}
            ]}}
    end.

%%====================================================================
%% Test 2: GLM-4.6 + Zhipu provider
%%====================================================================

%% @doc EUnit generator: chat, tool-call and streaming smoke tests for
%% GLM-4.6 through the native Zhipu provider. Skipped (empty test
%% list) when ZHIPU_API_KEY is unset.
glm46_zhipu_test_() ->
    case os:getenv("ZHIPU_API_KEY") of
        false ->
            {"跳过 GLM-4.6 Zhipu 测试 (未设置 ZHIPU_API_KEY)", []};
        ApiKey ->
            {"GLM-4.6 via Zhipu Provider", {timeout, 120, [
                {"简单对话", fun() ->
                    io:format("~n=== 测试 GLM-4.6 + Zhipu Provider ===~n"),
                    Config = zhipu_config(ApiKey, 100),
                    %% Redact the API key before logging the config.
                    io:format("Config: ~p~n", [maps:without([api_key], Config)]),

                    Messages = [#{role => user, content => <<"你好,请用一句话介绍自己"/utf8>>}],
                    io:format("发送请求...~n"),

                    Result = llm_client:chat(Config, Messages),
                    io:format("结果: ~p~n", [Result]),

                    ?assertMatch({ok, _}, Result),
                    {ok, Response} = Result,
                    Content = maps:get(content, Response),
                    ?assert(is_binary(Content)),
                    ?assert(byte_size(Content) > 0),
                    io:format("回复内容: ~ts~n", [Content]),
                    io:format("=== GLM-4.6 Zhipu 测试通过 ===~n~n")
                end},

                {"工具调用", fun() ->
                    io:format("~n=== 测试 GLM-4.6 Zhipu 工具调用 ===~n"),
                    Config = zhipu_config(ApiKey, 200),

                    Messages = [#{role => user, content => <<"上海现在几点了?"/utf8>>}],
                    Tools = [time_tool()],

                    Result = llm_client:with_tools(Config, Messages, Tools),
                    io:format("工具调用结果: ~p~n", [Result]),

                    ?assertMatch({ok, _}, Result),
                    {ok, Response} = Result,
                    ToolCalls = maps:get(tool_calls, Response, []),
                    Content = maps:get(content, Response, null),
                    io:format("Tool calls: ~p, Content: ~p~n", [ToolCalls, Content]),
                    %% The model may either call the tool or answer in
                    %% plain text; accept both.
                    ?assert(ToolCalls =/= [] orelse Content =/= null),
                    io:format("=== GLM-4.6 Zhipu 工具调用测试通过 ===~n~n")
                end},

                {"流式输出", fun() ->
                    io:format("~n=== 测试 GLM-4.6 Zhipu 流式输出 ===~n"),
                    Config = zhipu_config(ApiKey, 50),

                    Messages = [#{role => user, content => <<"说'测试成功'三个字"/utf8>>}],
                    %% Forward every stream event to this test process;
                    %% the callback runs in the client's context.
                    Self = self(),
                    Callback = fun(Event) ->
                        io:format("Stream event: ~p~n", [Event]),
                        Self ! {stream_event, Event}
                    end,

                    Result = llm_client:stream_chat(Config, Messages, Callback),
                    io:format("流式结果: ~p~n", [Result]),

                    ?assertMatch({ok, _}, Result),
                    {ok, Response} = Result,
                    Content = maps:get(content, Response),
                    ?assert(is_binary(Content)),
                    io:format("最终内容: ~ts~n", [Content]),
                    io:format("=== GLM-4.6 Zhipu 流式测试通过 ===~n~n")
                end}
            ]}}
    end.
%%====================================================================
%% Side-by-side comparison
%%====================================================================

%% @doc EUnit generator: asks both provider configurations the same
%% question and checks each returns a non-empty binary answer.
%% Skipped (empty test list) when ZHIPU_API_KEY is unset; the HTTP
%% calls only happen when EUnit executes the returned fun.
comparison_test_() ->
    case os:getenv("ZHIPU_API_KEY") of
        false ->
            {"跳过对比测试 (未设置 ZHIPU_API_KEY)", []};
        Key ->
            {"Provider 对比测试", {timeout, 180, [
                {"相同问题对比", fun() ->
                    io:format("~n=== Provider 对比测试 ===~n"),
                    Prompt = <<"什么是 Erlang?请用一句话回答。"/utf8>>,

                    %% GLM-4.7 through the Anthropic-compatible endpoint.
                    AnthropicCfg = llm_client:config(anthropic, #{
                        api_key => list_to_binary(Key),
                        base_url => <<"https://open.bigmodel.cn/api/anthropic">>,
                        model => <<"glm-4.7">>,
                        max_tokens => 100,
                        timeout => 60000
                    }),

                    %% GLM-4.6 through the native Zhipu provider.
                    ZhipuCfg = llm_client:config(zhipu, #{
                        api_key => list_to_binary(Key),
                        model => <<"glm-4.6">>,
                        max_tokens => 100,
                        timeout => 60000
                    }),

                    History = [#{role => user, content => Prompt}],

                    io:format("问题: ~ts~n~n", [Prompt]),

                    %% Send the shared question through one config and
                    %% return the answer text (crashes the test on error).
                    Ask = fun(Cfg) ->
                        {ok, Reply} = llm_client:chat(Cfg, History),
                        Answer = maps:get(content, Reply),
                        io:format("  回复: ~ts~n~n", [Answer]),
                        Answer
                    end,

                    io:format("GLM-4.7 (Anthropic Provider):~n"),
                    Answer1 = Ask(AnthropicCfg),

                    io:format("GLM-4.6 (Zhipu Provider):~n"),
                    Answer2 = Ask(ZhipuCfg),

                    ?assert(byte_size(Answer1) > 0),
                    ?assert(byte_size(Answer2) > 0),
                    io:format("=== 对比测试通过 ===~n")
                end}
            ]}}
    end.