'''
Version: 2.0
Author: Yue Zhong
Date: 2025-01-03 13:50:51
Description: Loads the bundled DeepSeek-V2 and GLM-4-9B tokenizers as module-level singletons.
LastEditors: Yue Zhong
LastEditTime: 2025-01-03 16:41:54
'''
# pip3 install transformers
# python3 deepseek_v2_tokenizer.py
__all__ = ['deep_tokenizer','glm4_tokenizer']

import transformers
import os
# Directory names of the bundled tokenizer assets, relative to this file.
deepseek_v2_tokenizer_dir0 = './deepseek_v2_tokenizer'
glm_4_9b_tokenizer_dir0 = './glm_4_9b_tokenizer'

# Resolve the tokenizer directories against this module's own location
# (abspath guards against __file__ being a bare filename when the module is
# imported from the current working directory).
_module_dir = os.path.dirname(os.path.abspath(__file__))
deepseek_tokenizer_dir = os.path.join(_module_dir, deepseek_v2_tokenizer_dir0)
glm_4_9b_tokenizer_dir = os.path.join(_module_dir, glm_4_9b_tokenizer_dir0)


def _load_tokenizer(path):
    """Load and return a HuggingFace tokenizer from the directory *path*.

    Raises:
        FileNotFoundError: if *path* does not exist, with an explicit message
            (transformers' own error for a missing directory is opaque).

    NOTE(security): ``trust_remote_code=True`` executes Python code shipped
    alongside the tokenizer files; only use with directories you control.
    """
    if not os.path.isdir(path):
        raise FileNotFoundError(f'tokenizer directory not found: {path}')
    return transformers.AutoTokenizer.from_pretrained(
        path, trust_remote_code=True)


# Loaded eagerly at import time so importers can use them immediately.
glm4_tokenizer = _load_tokenizer(glm_4_9b_tokenizer_dir)
deep_tokenizer = _load_tokenizer(deepseek_tokenizer_dir)

