from transformers import AutoTokenizer
from tqdm import tqdm

# Benchmark corpus: alternating prompt/answer strings from a robot-manipulation
# dialog (object grounding, 3D bbox, joint type/axis, gripper direction, force
# point, motion steps).  Coordinates are normalized floats — presumably in
# [0, 1] camera/world space; verify against the data producer.
data = [
        "Please tell me which object I need to operate?",
        "translation_door.",
        "Please provide the 3D bounding box of the region this sentence describes: translation_door.",
        "[[0.43,0.61,0.89],[0.43,0.32,0.36],[0.42,0.62,0.88],[0.56,0.72,0.68],[0.59,0.44,0.13],[0.55,0.73,0.67],[0.60,0.43,0.14],[0.42,0.33,0.35]]",
        "Please provide the joint's type and its 3D axis linked to the object part [[0.43,0.61,0.89],[0.43,0.32,0.36],[0.42,0.62,0.88],[0.56,0.72,0.68],[0.59,0.44,0.13],[0.55,0.73,0.67],[0.60,0.43,0.14],[0.42,0.33,0.35]]",
        "Type:prismatic.[0.42,0.49,0.62,0.57,0.60,0.40]",
        "Please provide me with the gripper direction if I want to operate the object part [[0.43,0.61,0.89],[0.43,0.32,0.36],[0.42,0.62,0.88],[0.56,0.72,0.68],[0.59,0.44,0.13],[0.55,0.73,0.67],[0.60,0.43,0.14],[0.42,0.33,0.35]]",
        "[0.57,0.6,0.4,0.43,0.49,0.62]",
        "Please provide me with the force point if I want to operate the object part [[0.43,0.61,0.89],[0.43,0.32,0.36],[0.42,0.62,0.88],[0.56,0.72,0.68],[0.59,0.44,0.13],[0.55,0.73,0.67],[0.60,0.43,0.14],[0.42,0.33,0.35]]",
        "[0.57,0.58,0.4]",
        "Please provide the robot point move step if I want to open the object part [[0.43,0.61,0.89],[0.43,0.32,0.36],[0.42,0.62,0.88],[0.56,0.72,0.68],[0.59,0.44,0.13],[0.55,0.73,0.67],[0.60,0.43,0.14],[0.42,0.33,0.35]].The known axis type of this object is: prismatic",
        "Step 0: [0.42,0.47,0.62].Step 1: [0.38,0.43,0.7].Step 2: [0.33,0.39,0.78].Step 3: [0.28,0.35,0.86]."
    ]
# Earlier candidate model set, kept for reference (large Llama checkpoints were
# dropped — presumably due to download size / gated access; confirm with author).
# model_list = ['gpt2',
#               'bert-base-cased',
#               't5-small',
#               'facebook/bart-base',
#               'meta-llama/Llama-2-7b',
#               'meta-llama/Meta-Llama-3-70B',
#               'meta-llama/Llama-3.1-70B',
#               'meta-llama/Llama-3.2-1B']
# Hugging Face hub ids of the tokenizers to compare; 'Ours' is a sentinel
# handled specially in get_token_count (precomputed count, no hub download).
model_list = ['gpt2',
              'bert-base-cased',
              'facebook/bart-base',
              'nghuyong/ernie-3.0-base-zh',
               'Ours'
              ]
# Accumulates one human-readable summary line per model (filled by get_token_count).
ans_list = []
def get_token_count(model_name: str) -> int:
    """Sum the token counts of every string in ``data`` for one tokenizer.

    Loads the named tokenizer from the Hugging Face hub and tokenizes each
    prompt/answer string in the module-level ``data`` list.  The sentinel
    name ``'Ours'`` skips tokenization and uses a precomputed total.

    Side effect (kept for backward compatibility): appends a human-readable
    summary line to the module-level ``ans_list``.

    Args:
        model_name: Hub model id (e.g. ``'gpt2'``) or the sentinel ``'Ours'``.

    Returns:
        The total token count over all strings in ``data``.
    """
    total = 0
    if model_name == 'Ours':
        # Precomputed count for our own tokenizer — no hub checkpoint to load.
        total = 637
    else:
        tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
        for item in data:
            # Plain (list) encoding suffices for counting tokens; the original
            # return_tensors='pt' needlessly required torch just to take len().
            total += len(tokenizer(item)['input_ids'])

    ans_list.append(f"{model_name}输入输出文本的Token数量: {total}")
    return total

# Count tokens for every candidate model (progress bar via tqdm), then
# print the collected summary lines — one per model, in order.
for name in tqdm(model_list):
    get_token_count(name)
print("\n".join(ans_list))