File size: 2,926 Bytes
4b9078a
 
 
9d3b01a
 
 
 
 
 
 
 
 
5102ec8
4b9078a
 
9d3b01a
 
 
 
 
 
 
 
 
 
 
4b9078a
9d3b01a
4b9078a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# """
# Unit tests for the individual LLM model bridges.
# """
def validate_path():
    """Make the script runnable from anywhere in the repo.

    Resolves the repository root (the parent of this file's directory),
    changes the current working directory to it, and appends it to
    ``sys.path`` so that ``request_llm.*`` imports below resolve.
    """
    import os, sys
    # Repo root is assumed to be one level above this test file.
    root_dir_assume = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
    os.chdir(root_dir_assume)
    sys.path.append(root_dir_assume)

validate_path() # validate path so you can run from base directory

from request_llm.bridge_moss import predict_no_ui_long_connection
# from request_llm.bridge_jittorllms_pangualpha import predict_no_ui_long_connection
# from request_llm.bridge_jittorllms_llama import predict_no_ui_long_connection

# Generation settings shared by every test prompt below.
llm_kwargs = {
    'max_length': 512,
    'top_p': 1,
    'temperature': 1,
}

# (prompt, history) pairs exercised one after another, exactly as the
# original three standalone calls did.
_smoke_cases = [
    ("你好", []),
    ("what is a hero?", ["hello world"]),
    ("如何理解传奇?", []),
]

for _prompt, _history in _smoke_cases:
    result = predict_no_ui_long_connection(inputs=_prompt,
                                           llm_kwargs=llm_kwargs,
                                           history=_history,
                                           sys_prompt="")
    print('final result:', result)

# # print(result)
# from multiprocessing import Process, Pipe
# class GetGLMHandle(Process):
#     def __init__(self):
#         super().__init__(daemon=True)
#         pass
#     def run(self):
#         # 子进程执行
#         # 第一次运行,加载参数
#         def validate_path():
#             import os, sys
#             dir_name = os.path.dirname(__file__)
#             root_dir_assume = os.path.abspath(os.path.dirname(__file__) +  '/..')
#             os.chdir(root_dir_assume + '/request_llm/jittorllms')
#             sys.path.append(root_dir_assume + '/request_llm/jittorllms')
#         validate_path() # validate path so you can run from base directory

#         jittorllms_model = None
#         import types
#         try:
#             if jittorllms_model is None:
#                 from models import get_model
#                 # availabel_models = ["chatglm", "pangualpha", "llama", "chatrwkv"]
#                 args_dict = {'model': 'chatrwkv'}
#                 print('self.jittorllms_model = get_model(types.SimpleNamespace(**args_dict))')
#                 jittorllms_model = get_model(types.SimpleNamespace(**args_dict))
#                 print('done get model')
#         except:
#             # self.child.send('[Local Message] Call jittorllms fail 不能正常加载jittorllms的参数。')
#             raise RuntimeError("不能正常加载jittorllms的参数!")
        
# x = GetGLMHandle()
# x.start()


# input()