concedo committed
Commit a4722da
1 Parent(s): ce96772

Update app.py

Files changed (1)
  1. app.py +0 -3
app.py CHANGED
@@ -17,7 +17,6 @@ def tokenize(input_text):
     phi3_tokens = phi3_tokenizer(input_text, add_special_tokens=True)["input_ids"]
     t5_tokens = t5_tokenizer(input_text, add_special_tokens=True)["input_ids"]
     gemma_tokens = gemma_tokenizer(input_text, add_special_tokens=True)["input_ids"]
-    command_r_tokens = command_r_tokenizer(input_text, add_special_tokens=True)["input_ids"]
     qwen_tokens = qwen_tokenizer(input_text, add_special_tokens=True)["input_ids"]
     codeqwen_tokens = codeqwen_tokenizer(input_text, add_special_tokens=True)["input_ids"]
     rwkv4_tokens = rwkv4_tokenizer(input_text, add_special_tokens=True)["input_ids"]
@@ -38,7 +37,6 @@ def tokenize(input_text):
         "Phi-3": phi3_tokens,
         "T5": t5_tokens,
         "Gemma": gemma_tokens,
-        "Command-R": command_r_tokens,
         "Qwen/Qwen1.5": qwen_tokens,
         "CodeQwen": codeqwen_tokens,
         "RWKV-v4": rwkv4_tokens,
@@ -65,7 +63,6 @@ if __name__ == "__main__":
     phi3_tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-4k-instruct")
     t5_tokenizer = AutoTokenizer.from_pretrained("google/flan-t5-xxl")
     gemma_tokenizer = AutoTokenizer.from_pretrained("alpindale/gemma-2b")
-    command_r_tokenizer = AutoTokenizer.from_pretrained("CohereForAI/c4ai-command-r-plus")
     qwen_tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen1.5-7B")
     codeqwen_tokenizer = AutoTokenizer.from_pretrained("Qwen/CodeQwen1.5-7B")
     rwkv4_tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-4-14b-pile", trust_remote_code=True)
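
For reference, a minimal sketch of the comparison pattern this file follows after the change: each remaining tokenizer encodes the input with add_special_tokens=True, and the app maps a display name to the resulting list of token IDs. The function shape, dictionary name, and sample input below are illustrative assumptions; only the AutoTokenizer repo IDs are taken from the diff.

from transformers import AutoTokenizer

def tokenize(input_text, tokenizers):
    # Each tokenizer returns token IDs; add_special_tokens=True keeps any
    # BOS/EOS-style markers that the model's tokenizer defines.
    return {
        name: tok(input_text, add_special_tokens=True)["input_ids"]
        for name, tok in tokenizers.items()
    }

if __name__ == "__main__":
    # Two of the tokenizers kept by this commit, loaded the same way as in app.py.
    tokenizers = {
        "Phi-3": AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-4k-instruct"),
        "Qwen/Qwen1.5": AutoTokenizer.from_pretrained("Qwen/Qwen1.5-7B"),
    }
    for name, ids in tokenize("Hello world", tokenizers).items():
        print(f"{name}: {len(ids)} tokens -> {ids}")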
 