from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Path to the locally downloaded model checkpoint.
model_path = "/home/fangning/work/LLM/models/deepseek-ai/deepseek-coder-1.3b-base"

# Load tokenizer and model from the local path.
# trust_remote_code is required for repos that ship custom model code.
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    trust_remote_code=True,
    torch_dtype=torch.float16,  # half precision to reduce GPU memory usage
    device_map="auto",  # let accelerate place layers on available device(s)
)

# Prompt (kept verbatim): asks the model to create a user table with
# id/name/age columns and write the DML and DQL statements.
input_text = "#帮我创建一个用户表，字段是id name和 age，帮我写dml和dql的语句"
inputs = tokenizer(input_text, return_tensors="pt").to(model.device)
# Use max_new_tokens instead of the deprecated max_length: max_length counts
# the prompt tokens too, so a long prompt would silently shrink (or zero out)
# the generation budget. max_new_tokens bounds only the continuation.
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))