# """
# 对各个llm模型进行单元测试
# """
def validate_path():
    """Change the working directory to the project root and add it to sys.path."""
    import os, sys

    root_dir_assume = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
    os.chdir(root_dir_assume)
    sys.path.append(root_dir_assume)


validate_path()  # adjust cwd and sys.path so the script can be run from the repository root
if __name__ == "__main__":
    # from request_llms.bridge_newbingfree import predict_no_ui_long_connection
    # from request_llms.bridge_moss import predict_no_ui_long_connection
    # from request_llms.bridge_jittorllms_pangualpha import predict_no_ui_long_connection
    # from request_llms.bridge_jittorllms_llama import predict_no_ui_long_connection
    # from request_llms.bridge_claude import predict_no_ui_long_connection
    # from request_llms.bridge_internlm import predict_no_ui_long_connection
    # from request_llms.bridge_deepseekcoder import predict_no_ui_long_connection
    # from request_llms.bridge_qwen_7B import predict_no_ui_long_connection
    from request_llms.bridge_qwen_local import predict_no_ui_long_connection

    # from request_llms.bridge_spark import predict_no_ui_long_connection
    # from request_llms.bridge_zhipu import predict_no_ui_long_connection
    # from request_llms.bridge_chatglm3 import predict_no_ui_long_connection
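    # To test a different model, comment out the active import above and uncomment
    # the corresponding bridge; every bridge module listed here exposes the same
    # predict_no_ui_long_connection interface.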

    # Generation parameters forwarded to the selected bridge.
    llm_kwargs = {
        "max_length": 4096,
        "top_p": 1,
        "temperature": 1,
    }

    # Ask the model "What is a proton?" (in Chinese), with a short greeting exchange as history.
    result = predict_no_ui_long_connection(
        inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt=""
    )
    print("final result:", result)