Update app.py
app.py CHANGED
@@ -25,7 +25,7 @@ def tokenize(input_text):
     t5_tokens = t5_tokenizer(input_text, add_special_tokens=True)["input_ids"]
 
 
-    return f"LLaMa-1: {len(llama1_tokens)}\nLLaMa-2: {len(llama2_tokens)}\nMistral: {len(mistral_tokens)}
+    return f"LLaMa-1: {len(llama1_tokens)}\nLLaMa-2: {len(llama2_tokens)}\nMistral: {len(mistral_tokens)}\nGPT-2/GPT-J: {len(gpt2_tokens)}\nGPT-NeoX: {len(gpt_neox_tokens)}\nFalcon: {len(falcon_tokens)}\nPhi-2: {len(phi2_tokens)}\nT5: {len(t5_tokens)}"
 
 
 if __name__ == "__main__":
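For context, the amended return sits inside a small tokenizer-comparison app: each tokenizer encodes the same input text and the app reports one token count per line. Below is a minimal sketch of how such an app.py is typically wired up, assuming the tokenizers are loaded with transformers' AutoTokenizer and the function is served through a Gradio text interface; the checkpoint ids (hf-internal-testing/llama-tokenizer, gpt2, t5-base) and the Gradio wiring are assumptions, not the Space's actual configuration, and only three of the compared tokenizers are shown.

import gradio as gr
from transformers import AutoTokenizer

# Checkpoint ids are stand-ins (assumptions); the Space may pin different repos.
llama_tokenizer = AutoTokenizer.from_pretrained("hf-internal-testing/llama-tokenizer")
gpt2_tokenizer = AutoTokenizer.from_pretrained("gpt2")
t5_tokenizer = AutoTokenizer.from_pretrained("t5-base")

def tokenize(input_text):
    # Each tokenizer call returns a dict; "input_ids" holds the token ids for the text.
    llama_tokens = llama_tokenizer(input_text, add_special_tokens=True)["input_ids"]
    gpt2_tokens = gpt2_tokenizer(input_text, add_special_tokens=True)["input_ids"]
    t5_tokens = t5_tokenizer(input_text, add_special_tokens=True)["input_ids"]
    # Format one count per line, mirroring the return string changed in the diff above.
    return (
        f"LLaMa: {len(llama_tokens)}\n"
        f"GPT-2/GPT-J: {len(gpt2_tokens)}\n"
        f"T5: {len(t5_tokens)}"
    )

if __name__ == "__main__":
    gr.Interface(fn=tokenize, inputs="text", outputs="text").launch()

Typing a sentence into the text box then returns the per-tokenizer counts, which is exactly what the extended f-string in the commit assembles for the full set of tokenizers.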