"""
运行多个daemon.py的办法

比如用0-3 4个显卡卡运行4个daemon.py
CUDA_VISIBLE_DEVICES=0 python daemon.py
CUDA_VISIBLE_DEVICES=1 python daemon.py
CUDA_VISIBLE_DEVICES=2 python daemon.py
CUDA_VISIBLE_DEVICES=3 python daemon.py

"""

import time
import redis

# Connect to database 0 of the Redis server on localhost:6379
# (positional args are host, port, db).
rdb = redis.Redis('127.0.0.1', 6379, 0)
# Result is discarded — presumably a connectivity probe so the lazy
# client opens its connection now and fails fast if Redis is down.
rdb.get('try_it')

print('-------------------------------------------------------')
print('正在加载模型……')

from transformers import AutoTokenizer, AutoModel

# Hub id would be "THUDM/chatglm2-6b-int4"; the pinned local snapshot
# path below is used instead, presumably to avoid a network fetch.
model_name = "/root/.cache/huggingface/hub/models--THUDM--chatglm2-6b-int4/snapshots/66ecaf1db3a5085714e133357ea4824b69698743"
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
# trust_remote_code=True: ChatGLM ships custom modeling code with the
# checkpoint. .cuda() moves the model to the GPU selected via
# CUDA_VISIBLE_DEVICES (see module docstring).
model = AutoModel.from_pretrained(model_name,trust_remote_code=True).cuda()
# Inference-only: disable dropout/batch-norm training behavior.
model = model.eval()
print('模型已经加载完毕。')


def model_infer(xinput, username):
    """
    Run one turn of LLM inference on the input text.

    :param xinput: input text (the user's prompt)
    :param username: name of the requesting user; currently unused,
        reserved for future per-user multi-turn history
    :return: the model's generated reply text
    """
    # Chat history for multi-turn dialogue. Deliberately empty for now:
    # every request is answered as an independent single turn.
    xlog = []

    # Dump the (currently always empty) history for debugging.
    print('-------------history-----------------')
    for i, (xin, xout) in enumerate(xlog):
        print(i, '>>>>', xin)
        print(i, '<<<<', xout)
    print('-------------this turn---------------')
    print('>>>>', '>>>>', xinput)
    # model.chat also returns the updated history; it is discarded
    # because multi-turn state is not persisted yet.
    xoutput, _history = model.chat(tokenizer, xinput, history=xlog)
    print('<<<<', '<<<<', xoutput)

    return xoutput


if '__main__' == __name__:

    print('------------------READY---------------------------')

    # Daemon loop: consume request uuids from the 'queue' list, run
    # inference, and publish each result into the 'uuid2output' hash.
    while True:

        # Block up to 1s waiting for a uuid instead of busy-polling
        # rpop + 1ms sleep (which burns CPU and hammers Redis); the
        # timeout keeps the loop periodically interruptible.
        popped = rdb.brpop('queue', timeout=1)
        if popped is None:
            continue
        # brpop returns a (key, value) pair; only the value is needed.
        xuuid = popped[1].decode('utf8')
        print('UUID:', xuuid)

        # Fetch the input text submitted under this uuid.
        xinput = rdb.hget('uuid2input', xuuid)
        if xinput is None:
            # Robustness guard: the producer may not have written the
            # hash entry yet. NOTE(review): the uuid is dropped here,
            # not re-queued — the request is silently lost.
            time.sleep(0.001)
            continue
        xinput = xinput.decode('utf8')
        print('input:', xinput)

        # Fetch the requesting user's name.
        username = rdb.hget('uuid2username', xuuid)
        if username is None:
            # Same robustness guard (and same silent-drop caveat).
            time.sleep(0.001)
            continue
        username = username.decode('utf8')
        print('username:', username)

        # Run the model on this request.
        xoutput = model_infer(xinput, username)
        print('output:', xoutput)

        # Publish the result under the same uuid for the consumer side.
        rdb.hset('uuid2output', xuuid, xoutput.encode('utf8'))