# pip3 install transformers
# python3 deepseek_tokenizer.py
import transformers

# Directory holding the tokenizer files (current working directory by default).
chat_tokenizer_dir = "./"

# Load the chat tokenizer from local files. trust_remote_code=True permits any
# custom tokenizer implementation shipped alongside the model files to execute.
tokenizer = transformers.AutoTokenizer.from_pretrained(
    chat_tokenizer_dir,
    trust_remote_code=True,
)

def count_tokens(text: str, tokenizer) -> int:
    """Return the number of token ids *tokenizer* produces for *text*."""
    return len(tokenizer.encode(text))

# Quick smoke test: count tokens for a sample DeepSeek self-introduction string.
sample_text = "您好！我是由中国的深度求索（DeepSeek）公司开发的智能助手DeepSeek-V3。有关模型和产品的详细内容请参考官方文档。"
sample_token_count = count_tokens(sample_text, tokenizer)

# Report the estimated token count (label kept in Chinese for the original CLI output).
print("DeepSeek Token 量 估算：", sample_token_count)
