{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": { "provenance": [], "gpuType": "T4", "include_colab_link": true },
    "kernelspec": { "name": "python3", "display_name": "Python 3" },
    "language_info": { "name": "python" },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": { "id": "view-in-github", "colab_type": "text" },
      "source": [ "\"Open" ]
    },
    {
      "cell_type": "markdown",
      "source": [
        "# TensorLM - webui for LLM models\n",
        "\n",
        "![preview.png](https://raw.githubusercontent.com/ehristoforu/TensorLM-webui/main/assets/preview.png)\n",
        "\n",
        "This is a simple and modern [Gradio](https://gradio.app) webui for LLM models in GGML format (.bin) based on LLaMA."
      ],
      "metadata": { "id": "1OkQD7FRWDAg" }
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": { "cellView": "form", "id": "66p_ooUpUJ0o" },
      "outputs": [],
      "source": [
        "#@title Install & Run UI\n",
        "\n",
        "# Clone the Colab-specific branch of the webui repository.\n",
        "!git clone https://github.com/ehristoforu/TensorLM-webui.git -b colab\n",
        "\n",
        "%cd /content/\n",
        "\n",
        "# Use %pip (not !pip) so packages are installed into the running kernel's environment.\n",
        "%pip install -q -r TensorLM-webui/requirements.txt\n",
        "\n",
        "# Download the 2-bit quantized LLaMA-2 7B chat model (GGML .bin) into the models folder.\n",
        "!wget -O TensorLM-webui/models/llama-2-7b-chat.ggmlv3.q2_K.bin https://huggingface.co/ehristoforu/LLMs/resolve/main/llama-2-7b-chat.ggmlv3.q2_K.bin\n",
        "\n",
        "%cd TensorLM-webui\n",
        "\n",
        "# Launch the Gradio webui (blocks; this is the long-running cell).\n",
        "!python webui.py"
      ]
    }
  ]
}