# Profile a Hugging Face Transformers model (here google/gemma-2b-it) with calflops.
from calflops import calculate_flops
from transformers import AutoModel
from transformers import AutoTokenizer

# Hugging Face access token. Gemma checkpoints are gated, so downloading
# them requires a token from an account that has accepted the license.
# (Original comment was Chinese for "personal/private key".)
access_token = ""  # personal access token — fill in before running

batch_size, max_seq_length = 1, 16
model_name = "google/gemma-2b-it"

# Pass the token so the gated checkpoint can be fetched; an empty string
# falls back to anonymous access (works only for non-gated models).
# NOTE(review): the original defined access_token but never used it.
model = AutoModel.from_pretrained(model_name, token=access_token or None)
tokenizer = AutoTokenizer.from_pretrained(model_name, token=access_token or None)

# calflops builds a dummy input of shape (batch_size, max_seq_length)
# via the tokenizer and profiles a single forward pass of the model.
flops, macs, params = calculate_flops(
    model=model,
    input_shape=(batch_size, max_seq_length),
    transformer_tokenizer=tokenizer,
)

# Label with the actual model name (the original print said "Bert",
# which was wrong — this script loads Gemma).
print(f"{model_name} FLOPs:{flops}   MACs:{macs}   Params:{params} \n")