# TCM-QNA / app.py
import gradio as gr
from transformers import pipeline
# Initialize the pipeline with the model
model_name = "ksh-nyp/llama-2-7b-chat-TCMKB2"
pipe = pipeline("text-generation", model=model_name, device=0)
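# Note: device=0 pins the pipeline to the first CUDA GPU; pass device=-1 (or omit
# the argument) to fall back to CPU if no GPU is available.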
def generate_text(prompt):
    # Generate text based on the input prompt
    results = pipe(prompt, max_length=1024)
    return results[0]['generated_text']
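# Example usage (a hypothetical prompt, assuming the script is run locally after
# the model has finished loading):
#   print(generate_text("Which herbs are traditionally used for insomnia?"))
# Note that max_length counts prompt tokens plus generated tokens; max_new_tokens
# can be used instead to cap only the generated portion.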
# Create the Gradio interface
interface = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(lines=2, placeholder="Enter your prompt here..."),
    outputs="text",
    title="Text Generation with TCM Fine-Tuned LLaMA 2 7B",
    description="Enter a prompt to generate text using the TCM Fine-Tuned LLaMA 2 7B model."
)
# Launch the app
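# On Hugging Face Spaces, launch() with no arguments is sufficient; when running
# locally, launch(share=True) can be used to expose a temporary public link.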
interface.launch()