import gradio as gr
import torch
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    TextIteratorStreamer,
)
import os
from threading import Thread
import spaces
import time
token = os.environ["HF_TOKEN"]  # Hugging Face access token, expected as an environment variable / Space secret
model_name = ""  # model repo id left unset in the source; fill in before running
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    token=token,
)
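
# --- Illustrative sketch, not part of the original file (which ends above) ---
# The imports (TextIteratorStreamer, Thread, gradio, spaces) suggest a streaming
# chat Space. A typical continuation might look like the following; the function
# name `respond`, the generation settings, and the ChatInterface wiring are
# assumptions for illustration, not the author's code.

tokenizer = AutoTokenizer.from_pretrained(model_name, token=token)


@spaces.GPU
def respond(message, history):
    # Build the conversation in "messages" format and tokenize via the chat template.
    messages = history + [{"role": "user", "content": message}]
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)

    # Stream tokens back to the UI: generate() runs in a background thread and
    # the streamer yields decoded text as it becomes available.
    streamer = TextIteratorStreamer(
        tokenizer, skip_prompt=True, skip_special_tokens=True
    )
    thread = Thread(
        target=model.generate,
        kwargs=dict(input_ids=input_ids, streamer=streamer, max_new_tokens=512),
    )
    thread.start()

    partial = ""
    for new_text in streamer:
        partial += new_text
        yield partial


demo = gr.ChatInterface(respond, type="messages")
demo.launch()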