multimodalart (HF staff) committed
Commit c8c7b71
1 Parent: 5d73026

update to playable

Files changed (3):
  1. app.py +66 -20
  2. mario.jar +0 -0
  3. requirements.txt +2 -1
app.py CHANGED
@@ -1,19 +1,53 @@
+
 import gradio as gr
 import torch
+import uuid
 from mario_gpt.dataset import MarioDataset
 from mario_gpt.prompter import Prompter
 from mario_gpt.lm import MarioLM
 from mario_gpt.utils import view_level, convert_level_to_png
 
-mario_lm = MarioLM()
+import os
+import subprocess
+
+from pyngrok import ngrok
 
+mario_lm = MarioLM()
 device = torch.device('cuda')
 mario_lm = mario_lm.to(device)
 TILE_DIR = "data/tiles"
 
+subprocess.Popen(["python3","-m","http.server","7861"])
+ngrok.set_auth_token(os.environ.get('NGROK_TOKEN'))
+http_tunnel = ngrok.connect(7861,bind_tls=True)
+
+def make_html_file(generated_level):
+    level_text = f"""{'''
+'''.join(view_level(generated_level,mario_lm.tokenizer))}"""
+    unique_id = uuid.uuid1()
+    with open(f"demo-{unique_id}.html", 'w', encoding='utf-8') as f:
+        f.write(f'''<!DOCTYPE html>
+<html lang="en">
+
+<head>
+    <meta charset="utf-8">
+    <title>Mario Game</title>
+    <script src="https://cjrtnc.leaningtech.com/20230216/loader.js"></script>
+</head>
 
+<body>
+</body>
+<script>
+    cheerpjInit().then(function () {{
+        cheerpjAddStringFile("/str/mylevel.txt", `{level_text}`);
+    }});
+    cheerpjCreateDisplay(512, 500);
+    cheerpjRunJar("/app/mario.jar");
+</script>
+</html>''')
+    return f"demo-{unique_id}.html"
 
-def update(pipes, enemies, blocks, elevation, temperature = 2.0, level_size = 1399, prompt = ""):
+def generate(pipes, enemies, blocks, elevation, temperature = 2.0, level_size = 1399, prompt = ""):
     if prompt == "":
         prompt = f"{pipes} pipes, {enemies} enemies, {blocks} blocks, {elevation} elevation"
     print(f"Using prompt: {prompt}")
@@ -24,28 +58,40 @@ def update(pipes, enemies, blocks, elevation, temperature = 2.0, level_size = 13
         temperature=temperature,
         use_tqdm=True
     )
+    filename = make_html_file(generated_level)
     img = convert_level_to_png(generated_level.squeeze(), TILE_DIR, mario_lm.tokenizer)[0]
-    return img
+
+    gradio_html = f'''<div style="border: 2px solid;">
+    <iframe width=512 height=512 style="margin: 0 auto" src="{http_tunnel.public_url}/{filename}"></iframe>
+    <p style="text-align:center">Press the arrow keys to move. Press <code>s</code> to jump and <code>a</code> to shoot flowers</p>
+    </div>'''
+    return [img, gradio_html]
 
 with gr.Blocks() as demo:
-    gr.Markdown("## Demo for ['MarioGPT: Open-Ended Text2Level Generation through Large Language Models'](https://github.com/shyamsn97/mario-gpt). Enter a text prompt or select parameters from below!")
-
-    text_prompt = gr.Textbox(value="", label="Enter your MarioGPT prompt. ex: 'many pipes, many enemies, some blocks, low elevation', or compose your prompts below")
-    with gr.Accordion(label="Compose your prompt", open=False):
-        pipes = gr.Radio(["no", "little", "some", "many"], label="pipes")
-        enemies = gr.Radio(["no", "little", "some", "many"], label="enemies")
-        blocks = gr.Radio(["little", "some", "many"], label="blocks")
-        elevation = gr.Radio(["low", "high"], label="elevation")
-        temperature = gr.Number(value=2.0, label="temperature: Increase these for more stochastic, but lower quality, generations")
+    gr.Markdown('''### Playable demo for MarioGPT: Open-Ended Text2Level Generation through Large Language Models
+    [[Github](https://github.com/shyamsn97/mario-gpt)], [[Paper](https://arxiv.org/abs/2302.05981)]
+    ''')
+    with gr.Tabs():
+        with gr.TabItem("Type prompt"):
+            text_prompt = gr.Textbox(value="", label="Enter your MarioGPT prompt. ex: 'many pipes, many enemies, some blocks, low elevation'")
+        with gr.TabItem("Compose prompt"):
+            with gr.Row():
+                pipes = gr.Radio(["no", "little", "some", "many"], label="pipes")
+                enemies = gr.Radio(["no", "little", "some", "many"], label="enemies")
+            with gr.Row():
+                blocks = gr.Radio(["little", "some", "many"], label="blocks")
+                elevation = gr.Radio(["low", "high"], label="elevation")
+
+    with gr.Accordion(label="Advanced settings", open=False):
+        temperature = gr.Number(value=2.0, label="temperature: Increase these for more diverse, but lower quality, generations")
        level_size = gr.Number(value=1399, precision=0, label="level_size")
 
     btn = gr.Button("Generate level")
-    level_image = gr.Image()
-
-
-
-
-    btn.click(fn=update, inputs=[pipes, enemies, blocks, elevation, temperature, level_size, text_prompt], outputs=level_image)
+    with gr.Row():
+        with gr.Box():
+            level_play = gr.HTML()
+        level_image = gr.Image()
+    btn.click(fn=generate, inputs=[pipes, enemies, blocks, elevation, temperature, level_size, text_prompt], outputs=[level_image, level_play])
     gr.Examples(
         examples=[
             ["many", "many", "some", "high"],
@@ -54,8 +100,8 @@ with gr.Blocks() as demo:
             ["no", "no", "many", "high", 2.4],
         ],
         inputs=[pipes, enemies, blocks, elevation],
-        outputs=level_image,
-        fn=update,
+        outputs=[level_image, level_play],
+        fn=generate,
         cache_examples=True,
     )
 demo.launch()
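The non-obvious part of the new app.py is how the playable level reaches the browser: the commit writes each generated level into a standalone demo-<uuid>.html page, serves the working directory with a plain http.server on port 7861, and exposes that server through an ngrok tunnel whose public URL the returned iframe points at. Below is a minimal, hedged sketch of that serving pattern, using only the calls that appear in the diff; the NGROK_TOKEN Space secret is assumed to be configured.

```python
import os
import subprocess

from pyngrok import ngrok  # added to requirements.txt in this commit

# Serve the app's working directory (where demo-<uuid>.html files are written)
# on a secondary port, separate from the Gradio app itself.
subprocess.Popen(["python3", "-m", "http.server", "7861"])

# Open a public HTTPS tunnel to that local server; NGROK_TOKEN is assumed to be
# set as a Space secret, as in the diff above.
ngrok.set_auth_token(os.environ.get("NGROK_TOKEN"))
http_tunnel = ngrok.connect(7861, bind_tls=True)

# Any file written next to app.py is now reachable under the tunnel's public URL;
# generate() embeds exactly such a URL in the iframe it hands to gr.HTML.
print(f"{http_tunnel.public_url}/demo-<uuid>.html")  # <uuid> is a placeholder
```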
mario.jar ADDED
Binary file (954 kB).
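mario.jar is the Java game client that CheerpJ runs in the browser; per the HTML template above, it reads the level as plain text from /str/mylevel.txt. A hedged sketch of producing that text on the Python side, reusing the mario_gpt utilities imported in app.py — the mario_lm.sample(...) call is assumed from the upstream README, since the corresponding lines of generate() fall outside this diff's hunks:

```python
import torch
from mario_gpt.lm import MarioLM
from mario_gpt.utils import view_level

# Load MarioGPT and sample a level from a text prompt (sample() signature assumed
# from the upstream README; the diff only shows the temperature/use_tqdm arguments).
mario_lm = MarioLM().to(torch.device("cuda"))
generated_level = mario_lm.sample(
    prompts=["many pipes, many enemies, some blocks, high elevation"],
    num_steps=1399,      # matches the level_size default exposed in the UI
    temperature=2.0,     # matches the temperature default exposed in the UI
    use_tqdm=True,
)

# view_level() yields the level as rows of tile characters; joined with newlines,
# this is the string make_html_file() embeds as /str/mylevel.txt for mario.jar.
level_text = "\n".join(view_level(generated_level, mario_lm.tokenizer))
print(level_text)
```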
 
requirements.txt CHANGED
@@ -1,4 +1,5 @@
 torch
 transformers
 scipy
-tqdm
+tqdm
+pyngrok