from vllm import LLM, SamplingParams
# from ...config import qwen_model_dir,tensor_parallel_size
# python -m raptor.chunk.llmchuncker.vllm_inference

def _map_roles(
    messages,
    role_map,
) :
    """Map the message roles."""
    output = []
    for message in messages:
        role = message["role"]
        if role in role_map:
            content: str | None = (
                message["content"] if isinstance(message["content"], str) else None
            )
            output.append((role_map[role], content))
    return output

def _format_chatml(
    system_message: str, messages, sep: str
) -> str:
    """Format the prompt with the chatml style."""
    ret = "" if system_message == "" else system_message + sep + "\n"
    for role, message in messages:
        if message:
            ret += role + "\n" + message + sep + "\n"
        else:
            ret += role + "\n"
    return ret

def format_qwen(
    messages, system_message: str = "你是一个分段专家。"
) -> str:
    """Build a Qwen ChatML prompt from OpenAI-style messages.

    Args:
        messages: list of ``{"role": ..., "content": ...}`` dicts. Only
            ``user`` and ``assistant`` roles are kept; any ``system`` entry
            in *messages* is ignored in favor of *system_message*.
        system_message: system prompt text (defaults to the original
            hard-coded Chinese segmentation-expert prompt, so existing
            callers are unaffected).

    Returns:
        The formatted prompt, ending with an empty ``<|im_start|>assistant``
        header so the model generates the assistant turn.
    """
    _roles = dict(user="<|im_start|>user", assistant="<|im_start|>assistant")
    system_template = "<|im_start|>system\n{system_message}"
    system_block = system_template.format(system_message=system_message)
    _messages = _map_roles(messages, _roles)
    # Trailing assistant header with no content prompts the model to answer.
    _messages.append((_roles["assistant"], None))
    _sep = "<|im_end|>"
    # NOTE: the original assigned an unused `_sep2 = "<|endoftext|>"`; removed.
    return _format_chatml(system_block, _messages, _sep)

# Demo: build a sample segmentation request and print the resulting prompt.
message = [
    {"role": "system", "content": "你是一个分段专家。"},
    {
        "role": "user",
        "content": "请帮我把这段话按照语义信息分成多个文本段，并结合上下文替换指代不明的地方。分隔符采用“||”表示。例如：\n文本：俄罗斯反对派领袖、普京最尖锐的批评者纳瓦尔尼（Alexei Navalny）据报在位于北极圈的监狱猝逝，终年47岁。十多年来，他揭露了俄罗斯权力核心的腐败问题，其影片在网上获得了数千万次观看。2020年，他遭神经毒剂毒害，在国外治疗后回国，随即被监禁至今。据报导，在他去世后，俄罗斯有100多名街头抗议者被拘留。当局警告人们不要参与集会。",
    },
]
print(format_qwen(message))

# sampling_params = SamplingParams(temperature=0.7, top_p=0.8,repetition_penalty=1,top_k=20,max_tokens=512)

# llm = LLM(model=qwen_model_dir,tensor_parallel_size=tensor_parallel_size)

# outputs = llm.generate(format_qwen(message), sampling_params)
# # output = llm.generate()
# print(outputs)
