freddyaboulton (HF Staff) committed · verified
Commit 19ab9d8 · 1 parent: 32838f6

Commit 1: Add 50 file(s)
demos/audio_debugger/run.ipynb CHANGED
@@ -1 +1 @@
- {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: audio_debugger"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import subprocess\n", "import os\n", "\n", "# get_audio returns the path to the audio file\n", "audio_file = gr.get_audio(\"cantina.wav\")\n", "\n", "with gr.Blocks() as demo:\n", " with gr.Tab(\"Audio\"):\n", " gr.Audio(audio_file)\n", " with gr.Tab(\"Interface\"):\n", " gr.Interface(\n", " lambda x: x, \"audio\", \"audio\", examples=[audio_file], cache_examples=True\n", " )\n", " with gr.Tab(\"Streaming\"):\n", " gr.Interface(\n", " lambda x: x,\n", " gr.Audio(streaming=True),\n", " \"audio\",\n", " examples=[audio_file],\n", " cache_examples=True,\n", " )\n", " with gr.Tab(\"console\"):\n", " ip = gr.Textbox(label=\"User IP Address\")\n", " gr.Interface(\n", " lambda cmd: subprocess.run([cmd], capture_output=True, shell=True, check=False)\n", " .stdout.decode(\"utf-8\")\n", " .strip(),\n", " \"text\",\n", " \"text\",\n", " )\n", "\n", " def get_ip(request: gr.Request):\n", " return request.client.host\n", "\n", " demo.load(get_ip, None, ip)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+ {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: audio_debugger"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import subprocess\n", "\n", "# get_audio returns the path to the audio file\n", "audio_file = gr.get_audio(\"cantina.wav\")\n", "\n", "with gr.Blocks() as demo:\n", " with gr.Tab(\"Audio\"):\n", " gr.Audio(audio_file, buttons=[\"download\"])\n", " with gr.Tab(\"Interface\"):\n", " gr.Interface(\n", " lambda x: x, \n", " gr.Audio(),\n", " gr.Audio(),\n", " examples=[audio_file], \n", " cache_examples=True,\n", " api_name=\"predict\"\n", " )\n", " with gr.Tab(\"Streaming\"):\n", " gr.Interface(\n", " lambda x: x,\n", " gr.Audio(streaming=True),\n", " \"audio\",\n", " examples=[audio_file],\n", " cache_examples=True,\n", " api_name=\"predict\"\n", " )\n", " with gr.Tab(\"console\"):\n", " ip = gr.Textbox(label=\"User IP Address\")\n", " gr.Interface(\n", " lambda cmd: subprocess.run([cmd], capture_output=True, shell=True, check=False)\n", " .stdout.decode(\"utf-8\")\n", " .strip(),\n", " \"text\",\n", " \"text\",\n", " api_name=\"predict\"\n", " )\n", "\n", " def get_ip(request: gr.Request):\n", " return request.client.host\n", "\n", " demo.load(get_ip, None, ip)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
demos/audio_debugger/run.py CHANGED
@@ -1,16 +1,20 @@
 import gradio as gr
 import subprocess
-import os
 
 # get_audio returns the path to the audio file
 audio_file = gr.get_audio("cantina.wav")
 
 with gr.Blocks() as demo:
     with gr.Tab("Audio"):
-        gr.Audio(audio_file)
+        gr.Audio(audio_file, buttons=["download"])
     with gr.Tab("Interface"):
         gr.Interface(
-            lambda x: x, "audio", "audio", examples=[audio_file], cache_examples=True
+            lambda x: x,
+            gr.Audio(),
+            gr.Audio(),
+            examples=[audio_file],
+            cache_examples=True,
+            api_name="predict"
         )
     with gr.Tab("Streaming"):
         gr.Interface(
@@ -19,6 +23,7 @@ with gr.Blocks() as demo:
             "audio",
             examples=[audio_file],
             cache_examples=True,
+            api_name="predict"
         )
     with gr.Tab("console"):
         ip = gr.Textbox(label="User IP Address")
@@ -28,6 +33,7 @@ with gr.Blocks() as demo:
             .strip(),
             "text",
             "text",
+            api_name="predict"
         )
 
     def get_ip(request: gr.Request):
demos/blocks_essay/run.ipynb CHANGED
@@ -1 +1 @@
- {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: blocks_essay"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "countries_cities_dict = {\n", " \"USA\": [\"New York\", \"Los Angeles\", \"Chicago\"],\n", " \"Canada\": [\"Toronto\", \"Montreal\", \"Vancouver\"],\n", " \"Pakistan\": [\"Karachi\", \"Lahore\", \"Islamabad\"],\n", "}\n", "\n", "def change_textbox(choice):\n", " if choice == \"short\":\n", " return gr.Textbox(lines=2, visible=True), gr.Button(interactive=True)\n", " elif choice == \"long\":\n", " return gr.Textbox(lines=8, visible=True, value=\"Lorem ipsum dolor sit amet\"), gr.Button(interactive=True)\n", " else:\n", " return gr.Textbox(visible=False), gr.Button(interactive=False)\n", "\n", "with gr.Blocks() as demo:\n", " radio = gr.Radio(\n", " [\"short\", \"long\", \"none\"], label=\"What kind of essay would you like to write?\"\n", " )\n", " text = gr.Textbox(lines=2, interactive=True, show_copy_button=True)\n", "\n", " with gr.Row():\n", " num = gr.Number(minimum=0, maximum=100, label=\"input\")\n", " out = gr.Number(label=\"output\")\n", " minimum_slider = gr.Slider(0, 100, 0, label=\"min\")\n", " maximum_slider = gr.Slider(0, 100, 100, label=\"max\")\n", " submit_btn = gr.Button(\"Submit\", variant=\"primary\")\n", "\n", " with gr.Row():\n", " country = gr.Dropdown(list(countries_cities_dict.keys()), label=\"Country\")\n", " cities = gr.Dropdown([], label=\"Cities\")\n", " @country.change(inputs=country, outputs=cities)\n", " def update_cities(country):\n", " cities = list(countries_cities_dict[country])\n", " return gr.Dropdown(choices=cities, value=cities[0], interactive=True)\n", "\n", " def reset_bounds(minimum, maximum):\n", " return gr.Number(minimum=minimum, maximum=maximum)\n", "\n", " radio.change(fn=change_textbox, inputs=radio, outputs=[text, submit_btn])\n", " gr.on(\n", " [minimum_slider.change, maximum_slider.change],\n", " reset_bounds,\n", " [minimum_slider, maximum_slider],\n", " outputs=num,\n", " )\n", " num.submit(lambda x: x, num, out)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+ {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: blocks_essay"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "countries_cities_dict = {\n", " \"USA\": [\"New York\", \"Los Angeles\", \"Chicago\"],\n", " \"Canada\": [\"Toronto\", \"Montreal\", \"Vancouver\"],\n", " \"Pakistan\": [\"Karachi\", \"Lahore\", \"Islamabad\"],\n", "}\n", "\n", "def change_textbox(choice):\n", " if choice == \"short\":\n", " return gr.Textbox(lines=2, visible=True), gr.Button(interactive=True)\n", " elif choice == \"long\":\n", " return gr.Textbox(lines=8, visible=True, value=\"Lorem ipsum dolor sit amet\"), gr.Button(interactive=True)\n", " else:\n", " return gr.Textbox(visible=False), gr.Button(interactive=False)\n", "\n", "with gr.Blocks() as demo:\n", " radio = gr.Radio(\n", " [\"short\", \"long\", \"none\"], label=\"What kind of essay would you like to write?\"\n", " )\n", " text = gr.Textbox(lines=2, interactive=True, buttons=[\"copy\"])\n", "\n", " with gr.Row():\n", " num = gr.Number(minimum=0, maximum=100, label=\"input\")\n", " out = gr.Number(label=\"output\")\n", " minimum_slider = gr.Slider(0, 100, 0, label=\"min\")\n", " maximum_slider = gr.Slider(0, 100, 100, label=\"max\")\n", " submit_btn = gr.Button(\"Submit\", variant=\"primary\")\n", "\n", " with gr.Row():\n", " country = gr.Dropdown(list(countries_cities_dict.keys()), label=\"Country\")\n", " cities = gr.Dropdown([], label=\"Cities\")\n", " @country.change(inputs=country, outputs=cities)\n", " def update_cities(country):\n", " cities = list(countries_cities_dict[country])\n", " return gr.Dropdown(choices=cities, value=cities[0], interactive=True)\n", "\n", " def reset_bounds(minimum, maximum):\n", " return gr.Number(minimum=minimum, maximum=maximum)\n", "\n", " radio.change(fn=change_textbox, inputs=radio, outputs=[text, submit_btn])\n", " gr.on(\n", " [minimum_slider.change, maximum_slider.change],\n", " reset_bounds,\n", " [minimum_slider, maximum_slider],\n", " outputs=num,\n", " )\n", " num.submit(lambda x: x, num, out)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
demos/blocks_essay/run.py CHANGED
@@ -18,7 +18,7 @@ with gr.Blocks() as demo:
     radio = gr.Radio(
         ["short", "long", "none"], label="What kind of essay would you like to write?"
     )
-    text = gr.Textbox(lines=2, interactive=True, show_copy_button=True)
+    text = gr.Textbox(lines=2, interactive=True, buttons=["copy"])
 
     with gr.Row():
         num = gr.Number(minimum=0, maximum=100, label="input")
demos/blocks_multiple_event_triggers/run.ipynb CHANGED
@@ -1 +1 @@
- {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: blocks_multiple_event_triggers"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio plotly pypistats python-dateutil "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import pypistats # type: ignore\n", "from datetime import date\n", "from dateutil.relativedelta import relativedelta\n", "import pandas as pd\n", "\n", "def get_plot(lib, time):\n", " data = pypistats.overall(lib, total=True, format=\"pandas\")\n", " data = data.groupby(\"category\").get_group(\"with_mirrors\").sort_values(\"date\")\n", " start_date = date.today() - relativedelta(months=int(time.split(\" \")[0]))\n", " data = data[(data['date'] > str(start_date))]\n", " data.date = pd.to_datetime(pd.to_datetime(data.date))\n", " return gr.LinePlot(value=data, x=\"date\", y=\"downloads\",\n", " tooltip=['date', 'downloads'],\n", " title=f\"Pypi downloads of {lib} over last {time}\",\n", " overlay_point=True,\n", " height=400,\n", " width=900)\n", "\n", "with gr.Blocks() as demo:\n", " gr.Markdown(\n", " \"\"\"\n", " ## Pypi Download Stats \ud83d\udcc8\n", " See live download stats for all of Hugging Face's open-source libraries \ud83e\udd17\n", " \"\"\")\n", " with gr.Row():\n", " lib = gr.Dropdown([\"transformers\", \"datasets\", \"huggingface-hub\", \"gradio\", \"accelerate\"],\n", " value=\"gradio\", label=\"Library\")\n", " time = gr.Dropdown([\"3 months\", \"6 months\", \"9 months\", \"12 months\"],\n", " value=\"3 months\", label=\"Downloads over the last...\")\n", "\n", " plt = gr.LinePlot()\n", " # You can add multiple event triggers in 2 lines like this\n", " for event in [lib.change, time.change, demo.load]:\n", " event(get_plot, [lib, time], [plt])\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+ {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: blocks_multiple_event_triggers"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio plotly pypistats python-dateutil "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import pypistats # type: ignore\n", "from datetime import date\n", "from dateutil.relativedelta import relativedelta\n", "import pandas as pd\n", "\n", "def get_plot(lib, time):\n", " data = pypistats.overall(lib, total=True, format=\"pandas\")\n", " data = data.groupby(\"category\").get_group(\"with_mirrors\").sort_values(\"date\")\n", " start_date = date.today() - relativedelta(months=int(time.split(\" \")[0]))\n", " data = data[(data['date'] > str(start_date))]\n", " data.date = pd.to_datetime(pd.to_datetime(data.date))\n", " return gr.LinePlot(value=data, x=\"date\", y=\"downloads\",\n", " tooltip=['date', 'downloads'],\n", " title=f\"Pypi downloads of {lib} over last {time}\",\n", " height=400)\n", "\n", "with gr.Blocks() as demo:\n", " gr.Markdown(\n", " \"\"\"\n", " ## Pypi Download Stats \ud83d\udcc8\n", " See live download stats for all of Hugging Face's open-source libraries \ud83e\udd17\n", " \"\"\")\n", " with gr.Row():\n", " lib = gr.Dropdown([\"transformers\", \"datasets\", \"huggingface-hub\", \"gradio\", \"accelerate\"],\n", " value=\"gradio\", label=\"Library\")\n", " time = gr.Dropdown([\"3 months\", \"6 months\", \"9 months\", \"12 months\"],\n", " value=\"3 months\", label=\"Downloads over the last...\")\n", "\n", " plt = gr.LinePlot()\n", " # You can add multiple event triggers in 2 lines like this\n", " for event in [lib.change, time.change, demo.load]:\n", " event(get_plot, [lib, time], [plt])\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
demos/blocks_multiple_event_triggers/run.py CHANGED
@@ -13,9 +13,7 @@ def get_plot(lib, time):
     return gr.LinePlot(value=data, x="date", y="downloads",
                        tooltip=['date', 'downloads'],
                        title=f"Pypi downloads of {lib} over last {time}",
-                       overlay_point=True,
-                       height=400,
-                       width=900)
+                       height=400)
 
 with gr.Blocks() as demo:
     gr.Markdown(
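The LinePlot call above only drops plot-styling arguments; the demo's titular pattern of wiring one callback to several event triggers is untouched. A minimal standalone sketch of that pattern, using illustrative component names that are not part of this commit:

import gradio as gr

def greet(name):
    return f"Hello, {name}!"

with gr.Blocks() as demo:
    name = gr.Textbox(label="Name")       # hypothetical input component
    out = gr.Textbox(label="Greeting")    # hypothetical output component
    # attach the same handler to several triggers in one loop, as the demo does
    for event in [name.change, name.submit, demo.load]:
        event(greet, [name], [out])

if __name__ == "__main__":
    demo.launch()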
demos/calculator/run.ipynb CHANGED
@@ -1 +1 @@
- {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: calculator"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "os.mkdir('examples')\n", "!wget -q -O examples/log.csv https://github.com/gradio-app/gradio/raw/main/demo/calculator/examples/log.csv"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "def calculator(num1, operation, num2):\n", " if operation == \"add\":\n", " return num1 + num2\n", " elif operation == \"subtract\":\n", " return num1 - num2\n", " elif operation == \"multiply\":\n", " return num1 * num2\n", " elif operation == \"divide\":\n", " if num2 == 0:\n", " raise gr.Error(\"Cannot divide by zero!\")\n", " return num1 / num2\n", "\n", "demo = gr.Interface(\n", " calculator,\n", " [\n", " \"number\",\n", " gr.Radio([\"add\", \"subtract\", \"multiply\", \"divide\"]),\n", " \"number\"\n", " ],\n", " \"number\",\n", " examples=[\n", " [45, \"add\", 3],\n", " [3.14, \"divide\", 2],\n", " [144, \"multiply\", 2.5],\n", " [0, \"subtract\", 1.2],\n", " ],\n", " title=\"Toy Calculator\",\n", " description=\"Here's a sample toy calculator.\",\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+ {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: calculator"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "os.mkdir('examples')\n", "!wget -q -O examples/log.csv https://github.com/gradio-app/gradio/raw/main/demo/calculator/examples/log.csv"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "def calculator(num1, operation, num2):\n", " if operation == \"add\":\n", " return num1 + num2\n", " elif operation == \"subtract\":\n", " return num1 - num2\n", " elif operation == \"multiply\":\n", " return num1 * num2\n", " elif operation == \"divide\":\n", " if num2 == 0:\n", " raise gr.Error(\"Cannot divide by zero!\")\n", " return num1 / num2\n", "\n", "demo = gr.Interface(\n", " calculator,\n", " [\n", " \"number\",\n", " gr.Radio([\"add\", \"subtract\", \"multiply\", \"divide\"]),\n", " \"number\"\n", " ],\n", " \"number\",\n", " examples=[\n", " [45, \"add\", 3],\n", " [3.14, \"divide\", 2],\n", " [144, \"multiply\", 2.5],\n", " [0, \"subtract\", 1.2],\n", " ],\n", " title=\"Toy Calculator\",\n", " description=\"Here's a sample toy calculator.\",\n", " api_name=\"predict\"\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
demos/calculator/run.py CHANGED
@@ -28,6 +28,7 @@ demo = gr.Interface(
     ],
     title="Toy Calculator",
     description="Here's a sample toy calculator.",
+    api_name="predict"
 )
 
 if __name__ == "__main__":
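Several of the diffs in this commit add an explicit api_name="predict" to gr.Interface, which keeps the programmatic endpoint name stable. A minimal sketch of what that enables, assuming gradio_client is installed and the calculator demo is running locally (the URL below is hypothetical, not part of this commit):

from gradio_client import Client

client = Client("http://127.0.0.1:7860/")                   # hypothetical local server
result = client.predict(45, "add", 3, api_name="/predict")  # same route name as in the diff
print(result)                                               # expected: 48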
demos/chatbot_multimodal/run.ipynb CHANGED
@@ -1 +1 @@
- {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatbot_multimodal"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/chatbot_multimodal/tuples_testcase.py"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import time\n", "\n", "# Chatbot demo with multimodal input (text, markdown, LaTeX, code blocks, image, audio, & video). Plus shows support for streaming text.\n", "\n", "\n", "def print_like_dislike(x: gr.LikeData):\n", " print(x.index, x.value, x.liked)\n", "\n", "\n", "def add_message(history, message):\n", " for x in message[\"files\"]:\n", " history.append({\"role\": \"user\", \"content\": {\"path\": x}})\n", " if message[\"text\"] is not None:\n", " history.append({\"role\": \"user\", \"content\": message[\"text\"]})\n", " return history, gr.MultimodalTextbox(value=None, interactive=False)\n", "\n", "\n", "def bot(history: list):\n", " response = \"**That's cool!**\"\n", " history.append({\"role\": \"assistant\", \"content\": \"\"})\n", " for character in response:\n", " history[-1][\"content\"] += character\n", " time.sleep(0.05)\n", " yield history\n", "\n", "\n", "with gr.Blocks() as demo:\n", " chatbot = gr.Chatbot(elem_id=\"chatbot\", bubble_full_width=False, type=\"messages\")\n", "\n", " chat_input = gr.MultimodalTextbox(\n", " interactive=True,\n", " file_count=\"multiple\",\n", " placeholder=\"Enter message or upload file...\",\n", " show_label=False,\n", " sources=[\"microphone\", \"upload\"],\n", " )\n", "\n", " chat_msg = chat_input.submit(\n", " add_message, [chatbot, chat_input], [chatbot, chat_input]\n", " )\n", " bot_msg = chat_msg.then(bot, chatbot, chatbot, api_name=\"bot_response\")\n", " bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])\n", "\n", " chatbot.like(print_like_dislike, None, None, like_user_message=True)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+ {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatbot_multimodal"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import time\n", "\n", "# Chatbot demo with multimodal input (text, markdown, LaTeX, code blocks, image, audio, & video). Plus shows support for streaming text.\n", "\n", "\n", "def print_like_dislike(x: gr.LikeData):\n", " print(x.index, x.value, x.liked)\n", "\n", "\n", "def add_message(history, message):\n", " user_msg = {\"role\": \"user\", \"content\": []}\n", " for x in message[\"files\"]:\n", " user_msg[\"content\"].append({\"path\": x})\n", " if message[\"text\"] is not None:\n", " user_msg[\"content\"].append(message[\"text\"])\n", " history.append(user_msg)\n", " return history, gr.MultimodalTextbox(value=None, interactive=False)\n", "\n", "\n", "def bot(history: list):\n", " response = \"**That's cool!**\"\n", " history.append({\"role\": \"assistant\", \"content\": \"\"})\n", " for character in response:\n", " history[-1][\"content\"] += character\n", " time.sleep(0.05)\n", " yield history\n", "\n", "\n", "with gr.Blocks() as demo:\n", " chatbot = gr.Chatbot(elem_id=\"chatbot\")\n", "\n", " chat_input = gr.MultimodalTextbox(\n", " interactive=True,\n", " file_count=\"multiple\",\n", " placeholder=\"Enter message or upload file...\",\n", " show_label=False,\n", " sources=[\"microphone\", \"upload\"],\n", " )\n", "\n", " chat_msg = chat_input.submit(\n", " add_message, [chatbot, chat_input], [chatbot, chat_input]\n", " )\n", " bot_msg = chat_msg.then(bot, chatbot, chatbot, api_name=\"bot_response\")\n", " bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])\n", "\n", " chatbot.like(print_like_dislike, None, None, like_user_message=True)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
demos/chatbot_multimodal/run.py CHANGED
@@ -9,10 +9,12 @@ def print_like_dislike(x: gr.LikeData):
 
 
 def add_message(history, message):
+    user_msg = {"role": "user", "content": []}
     for x in message["files"]:
-        history.append({"role": "user", "content": {"path": x}})
+        user_msg["content"].append({"path": x})
     if message["text"] is not None:
-        history.append({"role": "user", "content": message["text"]})
+        user_msg["content"].append(message["text"])
+    history.append(user_msg)
     return history, gr.MultimodalTextbox(value=None, interactive=False)
 
 
@@ -26,7 +28,7 @@ def bot(history: list):
 
 
 with gr.Blocks() as demo:
-    chatbot = gr.Chatbot(elem_id="chatbot", bubble_full_width=False, type="messages")
+    chatbot = gr.Chatbot(elem_id="chatbot")
 
     chat_input = gr.MultimodalTextbox(
         interactive=True,
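The add_message rewrite above groups all attachments and the text into a single user turn instead of appending one history entry per item. A hedged sketch of the history entry the updated handler builds (the file name and text below are illustrative only, not from the demo):

message = {"text": "What's in this clip?", "files": ["cantina.wav"]}  # hypothetical input
history = []

user_msg = {"role": "user", "content": []}       # one user turn, content held as a list
for path in message["files"]:
    user_msg["content"].append({"path": path})   # each attachment becomes a {"path": ...} entry
if message["text"] is not None:
    user_msg["content"].append(message["text"])  # the text is appended after the files
history.append(user_msg)

# history == [{"role": "user",
#              "content": [{"path": "cantina.wav"}, "What's in this clip?"]}]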
demos/chatinterface_streaming_echo/run.ipynb CHANGED
@@ -1 +1 @@
- {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import time\n", "import gradio as gr\n", "\n", "def slow_echo(message, history):\n", " for i in range(len(message)):\n", " time.sleep(0.05)\n", " yield \"You typed: \" + message[: i + 1]\n", "\n", "demo = gr.ChatInterface(\n", " slow_echo,\n", " type=\"messages\",\n", " flagging_mode=\"manual\",\n", " flagging_options=[\"Like\", \"Spam\", \"Inappropriate\", \"Other\"],\n", " save_history=True,\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+ {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import time\n", "import gradio as gr\n", "\n", "def slow_echo(message, history):\n", " for i in range(len(message)):\n", " time.sleep(0.05)\n", " yield \"You typed: \" + message[: i + 1]\n", "\n", "demo = gr.ChatInterface(\n", " slow_echo,\n", " flagging_mode=\"manual\",\n", " flagging_options=[\"Like\", \"Spam\", \"Inappropriate\", \"Other\"],\n", " save_history=True,\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
demos/chatinterface_streaming_echo/run.py CHANGED
@@ -8,7 +8,6 @@ def slow_echo(message, history):
 
 demo = gr.ChatInterface(
     slow_echo,
-    type="messages",
     flagging_mode="manual",
     flagging_options=["Like", "Spam", "Inappropriate", "Other"],
     save_history=True,
demos/code/run.ipynb CHANGED
@@ -1 +1 @@
- {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: code"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/code/file.css"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import os\n", "from time import sleep\n", "\n", "css_file = os.path.join(os.path.abspath(''), \"file.css\")\n", "\n", "def set_lang(language):\n", " print(language)\n", " return gr.Code(language=language)\n", "\n", "def set_lang_from_path():\n", " sleep(1)\n", " return gr.Code(open(css_file).read(), language=\"css\")\n", "\n", "def code(language, code):\n", " return gr.Code(code, language=language)\n", "\n", "io = gr.Interface(lambda x: x, \"code\", \"code\")\n", "\n", "with gr.Blocks() as demo:\n", " lang = gr.Dropdown(value=\"python\", choices=gr.Code.languages)\n", " with gr.Row():\n", " code_in = gr.Code(\n", " language=\"python\",\n", " label=\"Input\",\n", " value='def all_odd_elements(sequence):\\n \"\"\"Returns every odd element of the sequence.\"\"\"',\n", " show_line_numbers = False\n", " )\n", " code_out = gr.Code(label=\"Output\", show_line_numbers = True)\n", " btn = gr.Button(\"Run\")\n", " btn_two = gr.Button(\"Load File\")\n", "\n", " lang.change(set_lang, inputs=lang, outputs=code_in)\n", " btn.click(code, inputs=[lang, code_in], outputs=code_out)\n", " btn_two.click(set_lang_from_path, inputs=None, outputs=code_out)\n", " io.render()\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+ {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: code"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/code/file.css"]}, {"cell_type": "code", "execution_count": null, "id": "44380577570523278879349135829904343037", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import os\n", "from time import sleep\n", "\n", "css_file = os.path.join(os.path.abspath(''), \"file.css\")\n", "\n", "def set_lang(language):\n", " print(language)\n", " return gr.Code(language=language)\n", "\n", "def set_lang_from_path():\n", " sleep(1)\n", " return gr.Code(open(css_file).read(), language=\"css\")\n", "\n", "def code(language, code):\n", " return gr.Code(code, language=language)\n", "\n", "io = gr.Interface(lambda x: x, \"code\", \"code\", api_name=\"predict\")\n", "\n", "with gr.Blocks() as demo:\n", " lang = gr.Dropdown(value=\"python\", choices=gr.Code.languages)\n", " with gr.Row():\n", " code_in = gr.Code(\n", " language=\"python\",\n", " label=\"Input\",\n", " value='def all_odd_elements(sequence):\\n \"\"\"Returns every odd element of the sequence.\"\"\"',\n", " show_line_numbers = False\n", " )\n", " code_out = gr.Code(label=\"Output\", show_line_numbers = True)\n", " btn = gr.Button(\"Run\")\n", " btn_two = gr.Button(\"Load File\")\n", "\n", " lang.change(set_lang, inputs=lang, outputs=code_in)\n", " btn.click(code, inputs=[lang, code_in], outputs=code_out)\n", " btn_two.click(set_lang_from_path, inputs=None, outputs=code_out)\n", " io.render()\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
demos/code/run.py CHANGED
@@ -15,7 +15,7 @@ def set_lang_from_path():
 def code(language, code):
     return gr.Code(code, language=language)
 
-io = gr.Interface(lambda x: x, "code", "code")
+io = gr.Interface(lambda x: x, "code", "code", api_name="predict")
 
 with gr.Blocks() as demo:
     lang = gr.Dropdown(value="python", choices=gr.Code.languages)
demos/fake_diffusion_with_gif/run.ipynb CHANGED
@@ -1 +1 @@
- {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: fake_diffusion_with_gif"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio numpy requests Pillow "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import numpy as np\n", "import time\n", "import os\n", "from PIL import Image\n", "import requests\n", "from io import BytesIO\n", "\n", "def create_gif(images):\n", " pil_images = []\n", " for image in images:\n", " if isinstance(image, str):\n", " response = requests.get(image)\n", " image = Image.open(BytesIO(response.content))\n", " else:\n", " image = Image.fromarray((image * 255).astype(np.uint8))\n", " pil_images.append(image)\n", " fp_out = os.path.join(os.path.abspath(''), \"image.gif\")\n", " img = pil_images.pop(0)\n", " img.save(fp=fp_out, format='GIF', append_images=pil_images,\n", " save_all=True, duration=400, loop=0)\n", " return fp_out\n", "\n", "def fake_diffusion(steps):\n", " rng = np.random.default_rng()\n", " images = []\n", " for _ in range(steps):\n", " time.sleep(1)\n", " image = rng.random((600, 600, 3))\n", " images.append(image)\n", " yield image, gr.Image(visible=False)\n", "\n", " time.sleep(1)\n", " image = \"https://gradio-builds.s3.amazonaws.com/diffusion_image/cute_dog.jpg\"\n", " images.append(image)\n", " gif_path = create_gif(images)\n", "\n", " yield image, gr.Image(value=gif_path, visible=True)\n", "\n", "demo = gr.Interface(fake_diffusion,\n", " inputs=gr.Slider(1, 10, 3, step=1),\n", " outputs=[\"image\", gr.Image(label=\"All Images\", visible=False)])\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
+ {"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: fake_diffusion_with_gif"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio numpy requests Pillow "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import numpy as np\n", "import time\n", "import os\n", "from PIL import Image\n", "import requests\n", "from io import BytesIO\n", "\n", "def create_gif(images):\n", " pil_images = []\n", " for image in images:\n", " if isinstance(image, str):\n", " response = requests.get(image)\n", " image = Image.open(BytesIO(response.content))\n", " else:\n", " image = Image.fromarray((image * 255).astype(np.uint8))\n", " pil_images.append(image)\n", " fp_out = os.path.join(os.path.abspath(''), \"image.gif\")\n", " img = pil_images.pop(0)\n", " img.save(fp=fp_out, format='GIF', append_images=pil_images,\n", " save_all=True, duration=400, loop=0)\n", " return fp_out\n", "\n", "def fake_diffusion(steps):\n", " rng = np.random.default_rng()\n", " images = []\n", " for _ in range(steps):\n", " time.sleep(1)\n", " image = rng.random((600, 600, 3))\n", " images.append(image)\n", " yield image, gr.Image(visible=False)\n", "\n", " time.sleep(1)\n", " image = \"https://gradio-builds.s3.amazonaws.com/diffusion_image/cute_dog.jpg\"\n", " images.append(image)\n", " gif_path = create_gif(images)\n", "\n", " yield image, gr.Image(value=gif_path, visible=True)\n", "\n", "demo = gr.Interface(fake_diffusion,\n", " inputs=gr.Slider(1, 10, 3, step=1),\n", " outputs=[\"image\", gr.Image(label=\"All Images\", visible=False)],\n", " api_name=\"predict\",)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
demos/fake_diffusion_with_gif/run.py CHANGED
@@ -39,7 +39,8 @@ def fake_diffusion(steps):
 
 demo = gr.Interface(fake_diffusion,
                     inputs=gr.Slider(1, 10, 3, step=1),
-                    outputs=["image", gr.Image(label="All Images", visible=False)])
+                    outputs=["image", gr.Image(label="All Images", visible=False)],
+                    api_name="predict",)
 
 if __name__ == "__main__":
     demo.launch()