clamepending committed on
Commit 0b32d2d · verified · 1 Parent(s): 8bba0f9

Update app.py

Files changed (1)
app.py +0 -1
app.py CHANGED
@@ -62,7 +62,6 @@ def tokenize(input):
     encoded = encode_sequence(input)
     return encoded, [decode_sequence([token]) for token in encoded], len(input)/len(encoded)
 
-
 examples = ["ayyyy whats up 👋", "Okay now picture little Bobby just a youngin' runnin' round", "Peace is when you leave it in the past, let it heal like a cast;When enough time pass, then you blast;Kinda like John Wick, bars like a convict;Fuck around and you don't wanna start shit, woo!"]
 
 intf = gr.Interface(fn=tokenize, inputs="text", outputs=["text", "text", gr.components.Number()], examples=examples, title = "Logic Tokenizer", description="Logic Tokenizer tokenizes your text based on BPE run on the top 10 songs by logic. The vocab size is 1024, and expanded from an original 256 from utf-8. The float output is the compression ratio of len(input)/len(encoded), and the array of integers are the tokens the model learned.")
 
62
  encoded = encode_sequence(input)
63
  return encoded, [decode_sequence([token]) for token in encoded], len(input)/len(encoded)
64
 
 
65
  examples = ["ayyyy whats up 👋", "Okay now picture little Bobby just a youngin' runnin' round", "Peace is when you leave it in the past, let it heal like a cast;When enough time pass, then you blast;Kinda like John Wick, bars like a convict;Fuck around and you don't wanna start shit, woo!"]
66
 
67
  intf = gr.Interface(fn=tokenize, inputs="text", outputs=["text", "text", gr.components.Number()], examples=examples, title = "Logic Tokenizer", description="Logic Tokenizer tokenizes your text based on BPE run on the top 10 songs by logic. The vocab size is 1024, and expanded from an original 256 from utf-8. The float output is the compression ratio of len(input)/len(encoded), and the array of integers are the tokens the model learned.")
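For context: encode_sequence and decode_sequence are not part of this diff, so the snippet below is only a minimal sketch of how a byte-level BPE encoder/decoder with a 1024-token vocab (grown from the 256 UTF-8 byte values, as the interface description states) is commonly written. The merges and vocab tables and every name here are hypothetical stand-ins, not the actual contents of app.py.

# Hedged sketch only: encode_sequence / decode_sequence are not shown in this
# commit, so the names and data structures below (merges, vocab) are assumptions
# about how a byte-level BPE tokenizer like this one is typically implemented.

merges = {}  # (token_a, token_b) -> new token id, filled in training order
vocab = {i: bytes([i]) for i in range(256)}  # base vocab: the 256 UTF-8 byte values
# During training, each learned merge (a, b) -> t would also set
# vocab[t] = vocab[a] + vocab[b], growing the vocab toward 1024 entries.

def get_pair_counts(ids):
    # Count adjacent token pairs in a sequence of token ids.
    counts = {}
    for pair in zip(ids, ids[1:]):
        counts[pair] = counts.get(pair, 0) + 1
    return counts

def merge_pair(ids, pair, new_id):
    # Replace every occurrence of `pair` in `ids` with the single token `new_id`.
    out, i = [], 0
    while i < len(ids):
        if i < len(ids) - 1 and (ids[i], ids[i + 1]) == pair:
            out.append(new_id)
            i += 2
        else:
            out.append(ids[i])
            i += 1
    return out

def encode_sequence(text):
    # Start from raw UTF-8 bytes, then greedily apply merges in the order learned.
    ids = list(text.encode("utf-8"))
    while len(ids) >= 2:
        counts = get_pair_counts(ids)
        pair = min(counts, key=lambda p: merges.get(p, float("inf")))
        if pair not in merges:
            break  # no learned merge applies any more
        ids = merge_pair(ids, pair, merges[pair])
    return ids

def decode_sequence(ids):
    # Concatenate each token's byte string and decode back to text.
    return b"".join(vocab[i] for i in ids).decode("utf-8", errors="replace")

With tables learned from the lyrics, the interface's float output is simply len(input)/len(encoded), i.e. input characters per BPE token, exactly as the tokenize function above computes it.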