HeshamHaroon committed
Commit 9998155
1 Parent(s): 66b3df6

Update app.py

Files changed (1)
  1. app.py +14 -12
app.py CHANGED
@@ -1,18 +1,17 @@
 from gradio import Interface
 import gradio as gr
 import aranizer
-from aranizer import aranizer_bpe32k, aranizer_bpe50k, aranizer_bpe64k, aranizer_bpe86k, aranizer_sp32k, aranizer_sp50k, aranizer_sp64k, aranizer_sp86k
+from aranizer import aranizer_bpe50k, aranizer_bpe64k, aranizer_bpe86k, aranizer_sp32k, aranizer_sp50k, aranizer_sp64k, aranizer_sp86k
 
-# Load all available tokenizers
+# Correct way to load all available tokenizers as per the provided usage information
 tokenizers = {
-    "aranizer_bpe32k": aranizer.aranizer_bpe32k.get_tokenizer(),
-    "aranizer_bpe50k": aranizer.aranizer_bpe50k.get_tokenizer(),
-    "aranizer_bpe64k": aranizer.aranizer_bpe64k.get_tokenizer(),
-    "aranizer_bpe86k": aranizer.aranizer_bpe86k.get_tokenizer(),
-    "aranizer_sp32k": aranizer.aranizer_sp32k.get_tokenizer(),
-    "aranizer_sp50k": aranizer.aranizer_sp50k.get_tokenizer(),
-    "aranizer_sp64k": aranizer.aranizer_sp64k.get_tokenizer(),
-    "aranizer_sp86k": aranizer.aranizer_sp86k.get_tokenizer(),
+    "aranizer_bpe50k": aranizer_bpe50k.get_tokenizer(),
+    "aranizer_bpe64k": aranizer_bpe64k.get_tokenizer(),
+    "aranizer_bpe86k": aranizer_bpe86k.get_tokenizer(),
+    "aranizer_sp32k": aranizer_sp32k.get_tokenizer(),
+    "aranizer_sp50k": aranizer_sp50k.get_tokenizer(),
+    "aranizer_sp64k": aranizer_sp64k.get_tokenizer(),
+    "aranizer_sp86k": aranizer_sp86k.get_tokenizer(),
 }
 
 def compare_tokenizers(text):
@@ -21,13 +20,16 @@ def compare_tokenizers(text):
         tokens = tokenizer.tokenize(text)
         encoded_output = tokenizer.encode(text, add_special_tokens=True)
         decoded_text = tokenizer.decode(encoded_output)
+        # Collect each tokenizer's results
         results.append((name, tokens, encoded_output, decoded_text))
     return results
 
-# Define the Gradio interface components properly based on the Gradio API
+# Correctly use Gradio's components for inputs and outputs
 inputs_component = gr.components.Textbox(lines=2, placeholder="Enter Arabic text here...", label="Input Text")
 outputs_component = gr.components.Table(label="Results", headers=["Tokenizer", "Tokens", "Encoded Output", "Decoded Text"])
 
-iface = Interface(fn=compare_tokenizers, inputs=inputs_component, outputs=outputs_component)
+# Setting up the interface
+iface = Interface(fn=compare_tokenizers, inputs=inputs_component, outputs=outputs_component, title="AraNizer Tokenizer Comparison")
 
+# Launching the Gradio app
 iface.launch()
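
For a quick local check of the updated compare_tokenizers logic without launching the Gradio app, the sketch below runs the same tokenize/encode/decode round trip directly. It is a minimal sketch, not part of the commit: it assumes the aranizer package is installed and exposes the same get_tokenizer() modules app.py imports, the results list and the for-loop header are reconstructed because they fall outside the diff hunks above, and the sample Arabic string is arbitrary.

# Minimal standalone sketch (not part of the commit): exercises the same
# tokenize/encode/decode round trip as app.py. Assumes the aranizer package
# is installed; the results list and loop header are reconstructed here
# because they sit outside the hunks shown above.
from aranizer import aranizer_bpe50k, aranizer_sp32k

tokenizers = {
    "aranizer_bpe50k": aranizer_bpe50k.get_tokenizer(),
    "aranizer_sp32k": aranizer_sp32k.get_tokenizer(),
}

def compare_tokenizers(text):
    results = []
    for name, tokenizer in tokenizers.items():
        tokens = tokenizer.tokenize(text)                                  # subword strings
        encoded_output = tokenizer.encode(text, add_special_tokens=True)   # token ids
        decoded_text = tokenizer.decode(encoded_output)                    # round-trip text
        results.append((name, tokens, encoded_output, decoded_text))
    return results

for row in compare_tokenizers("مرحبا بالعالم"):  # arbitrary sample text
    print(row)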