{ "nbformat": 4, "nbformat_minor": 0, "metadata": { "colab": { "provenance": [], "gpuType": "T4", "authorship_tag": "ABX9TyOZhPcZe61RhDjhEFQv0vrl", "include_colab_link": true }, "kernelspec": { "name": "python3", "display_name": "Python 3" }, "language_info": { "name": "python" }, "accelerator": "GPU" }, "cells": [ { "cell_type": "markdown", "metadata": { "id": "view-in-github", "colab_type": "text" }, "source": [ "\"Open" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "7O5JSosg5-rx" }, "outputs": [], "source": [ "!pip install -U llama2-wrapper==0.1.12" ] }, { "cell_type": "code", "source": [ "%cd /content\n", "!git clone https://github.com/liltom-eth/llama2-webui\n", "\n", "%cd /content/llama2-webui\n", "!python -m llama2_wrapper.download --repo_id TheBloke/CodeLlama-7B-Instruct-GPTQ\n", "\n", "%cd /content/llama2-webui\n", "!python app.py --backend_type gptq --model_path ./models/CodeLlama-7B-Instruct-GPTQ/ --share True" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "Y6A7bJdkmzY8", "outputId": "0d702a7d-68ab-4747-f012-246d4dee3718" }, "execution_count": 4, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "/content\n", "fatal: destination path 'llama2-webui' already exists and is not an empty directory.\n", "/content/llama2-webui\n", "Start downloading model TheBloke/CodeLlama-7B-Instruct-GPTQ to: ./models/CodeLlama-7B-Instruct-GPTQ\n", "Fetching 15 files: 0% 0/15 [00:00\n", " main()\n", " File \"/content/llama2-webui/app.py\", line 318, in main\n", " demo.queue(max_size=20).launch(share=args.share)\n", " File \"/usr/local/lib/python3.10/dist-packages/gradio/blocks.py\", line 2046, in launch\n", " self.block_thread()\n", " File \"/usr/local/lib/python3.10/dist-packages/gradio/blocks.py\", line 2132, in block_thread\n", " print(\"Keyboard interruption in main thread... closing server.\")\n", "KeyboardInterrupt\n", "Killing tunnel 127.0.0.1:7860 <> https://71c3606942c440e7dd.gradio.live\n", "terminate called without an active exception\n" ] } ] } ] }