{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: autocomplete\n", "### This text generation demo works like autocomplete. There's only one textbox and it's used for both the input and the output. The demo loads the model as an interface, and uses that interface as an API. It then uses blocks to create the UI. All of this is done in less than 10 lines of code.\n", "        "]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import os\n", "\n", "# save your HF API token from https:/hf.co/settings/tokens as an env variable to avoid rate limiting\n", "hf_token = os.getenv(\"hf_token\")\n", "\n", "# load a model from https://hf.co/models as an interface, then use it as an api \n", "# you can remove the hf_token parameter if you don't care about rate limiting. \n", "api = gr.load(\"huggingface/gpt2-xl\", hf_token=hf_token)\n", "\n", "def complete_with_gpt(text):\n", "    return text[:-50] + api(text[-50:])\n", "\n", "with gr.Blocks() as demo:\n", "    textbox = gr.Textbox(placeholder=\"Type here...\", lines=4)\n", "    btn = gr.Button(\"Autocomplete\")\n", "    \n", "    # define what will run when the button is clicked, here the textbox is used as both an input and an output\n", "    btn.click(fn=complete_with_gpt, inputs=textbox, outputs=textbox, queue=False)\n", "\n", "demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}