# tokenizer-arena / app.py
from playground_app import demo as playground_tab
from compression_app import demo as compression_tab
from character_app import demo as character_tab
from patcher.gr_interface import TabbedInterface
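
# The three tabs are standalone Gradio demos defined in their own modules:
# a tokenizer playground, a compression leaderboard, and per-character statistics.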
demo = TabbedInterface(
    [playground_tab, compression_tab, character_tab],
    [" ⚔️ Playground", "🏆 Compression Leaderboard", "📊 Character Statistics"],  # possible additions: encoding speed, decoding speed, character categories (zh, num, etc., with regex support), supported languages, organizations
    title='<div align="center">Tokenizer Arena ⚔️</div>',
    css="css/style.css"
)
# Run the custom onload script (js/onload.js) in the browser when the page loads.
with open("js/onload.js", "r", encoding="utf-8") as f:
    demo.load(js=f.read())

if __name__ == "__main__":
    demo.launch()
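
# Note: `TabbedInterface` is imported from the local `patcher.gr_interface` module,
# not from Gradio itself, and its implementation is not shown in this file. The
# commented sketch below is only an assumption of what such a patched wrapper might
# look like, built on the stock `gr.TabbedInterface` (which already accepts the
# `title` and `css` arguments used above).
#
#   import gradio as gr
#
#   class TabbedInterface(gr.TabbedInterface):
#       """Hypothetical thin wrapper that keeps the upstream constructor signature
#       while leaving room for app-specific layout or behaviour patches."""
#
#       def __init__(self, interface_list, tab_names=None, title=None, css=None, **kwargs):
#           super().__init__(interface_list, tab_names=tab_names, title=title, css=css, **kwargs)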