import torch
from transformers import AutoTokenizer,AutoModelForCausalLM
from modelscope import AutoModelForCausalLM, AutoTokenizer as MsTokenizer
from typing import List,Dict,Optional
import time

class LocalQwen:
    """Thin wrapper around a locally stored Qwen chat model.

    Loads the tokenizer and model via the modelscope framework and exposes
    a minimal single-turn text-generation API.
    """

    def __init__(self, model_path: str, device: str = "cuda" if torch.cuda.is_available() else "cpu"):
        """Load tokenizer and model from *model_path*.

        Args:
            model_path: Local path (or model id) of the Qwen checkpoint.
            device: Target device string; defaults to CUDA when available.
        """
        self.model_path = model_path
        self.device = device

        print(f"正在加载模型：{model_path}")
        print(f'使用设备：{device}')

        # Load through the modelscope framework (mirrors the transformers API).
        self.tokenizer = MsTokenizer.from_pretrained(model_path, trust_remote_code=True)
        self.model = AutoModelForCausalLM.from_pretrained(
            model_path,
            device_map="auto",
            # BUG FIX: the original picked float32 on CUDA and float16 on CPU,
            # which is inverted — half precision is the GPU optimization; CPU
            # inference should stay in float32 (fp16 kernels are often missing
            # or slow on CPU).
            torch_dtype=torch.float16 if self.device == "cuda" else torch.float32,
            trust_remote_code=True,
        )
        self.model.eval()
        print('模型加载成功')

    def generate(self, prompt: str, max_new_tokens: int = 512, temperature: float = 0.7) -> str:
        """Generate a single-turn answer for *prompt*.

        Args:
            prompt: User question; blank/whitespace-only input short-circuits.
            max_new_tokens: Upper bound on newly generated tokens.
            temperature: Sampling temperature (used with top_p nucleus sampling).

        Returns:
            The model's decoded reply, or an error string on failure
            (this method never raises).
        """
        if not prompt.strip():
            return "输入为空"

        # Single-turn message list in the chat-template format.
        messages = [{
            "role": "user", "content": prompt
        }]
        try:
            # BUG FIX: the original passed the list of message dicts directly
            # to the tokenizer, which does not understand chat dicts and would
            # tokenize garbage. Render the chat template to plain text first,
            # then tokenize that.
            text = self.tokenizer.apply_chat_template(
                messages, tokenize=False, add_generation_prompt=True
            )
            input_ids = self.tokenizer(text, return_tensors="pt").input_ids
            input_ids = input_ids.to(self.device)

            with torch.no_grad():
                gen_ids = self.model.generate(
                    input_ids,
                    max_new_tokens=max_new_tokens,
                    temperature=temperature,
                    top_p=0.7,
                    do_sample=True,
                    pad_token_id=self.tokenizer.eos_token_id
                )
                # Drop the echoed prompt tokens; keep only the new completion.
                gen_ids = gen_ids[:, input_ids.shape[-1]:]
                response = self.tokenizer.decode(gen_ids[0], skip_special_tokens=True)
                return response.strip()
        except Exception as e:
            # Best-effort API: surface the error as text instead of raising.
            return f"生成失败{str(e)}"

    def chat(self, question: str, context: str = "") -> str:
        """Answer *question*, optionally grounded on *context*.

        BUG FIX: the original accepted *context* but silently discarded it;
        a non-empty context is now prepended to the question. Behavior with
        the default empty context is unchanged.
        """
        prompt = f"{context}\n{question}" if context else question
        return self.generate(prompt)
    


if __name__ =="__main__":
    MODEL_PATH="e:/llm/modelscope_models/Qwen3-0.6B"
    qwen=LocalQwen()
    test_case=["高血压有什么症状?","如何预防高血压?"]
    for q in test_case:
        answer=qwen.chat(q)
        print(answer)

    