# readme-chatbot / app.py
import gradio as gr
import transformers
# Load the tokenizer and model (example: Phi-3 Mini; swap in your own model below)
model_name = "microsoft/Phi-3-mini-4k-instruct" # Replace with your model name
tokenizer = transformers.AutoTokenizer.from_pretrained(model_name)
model = transformers.AutoModelForCausalLM.from_pretrained(model_name)
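
# Optional sketch (an assumption, not part of the original app): Phi-3 Mini has roughly
# 3.8B parameters, so on a GPU machine it is common to load it in half precision, e.g.
#   import torch
#   model = transformers.AutoModelForCausalLM.from_pretrained(
#       model_name, torch_dtype=torch.float16, device_map="auto")  # device_map needs the `accelerate` package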
def chatbot_response(user_input):
    # Tokenize the user message and generate a continuation
    inputs = tokenizer.encode(user_input, return_tensors="pt")
    outputs = model.generate(inputs, max_new_tokens=100, num_return_sequences=1)
    # Decode only the newly generated tokens so the prompt is not echoed back
    response = tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True)
    return response
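
# Hedged sketch, not wired into the UI below: the app uploads a README, but
# chatbot_response() above ignores it. One way to give the model that context is to
# prepend the README text to the prompt via the tokenizer's chat template.
# `chatbot_response_with_readme` and its `readme_text` parameter are illustrative
# names, and Phi-3's chat template is assumed to be exposed via apply_chat_template.
def chatbot_response_with_readme(user_input, readme_text):
    messages = [
        {"role": "user", "content": f"README:\n{readme_text}\n\nQuestion: {user_input}"}
    ]
    # apply_chat_template tokenizes the conversation using the model's own chat format
    prompt_ids = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors="pt")
    outputs = model.generate(prompt_ids, max_new_tokens=200)
    # Return only the newly generated tokens
    return tokenizer.decode(outputs[0][prompt_ids.shape[-1]:], skip_special_tokens=True)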
def upload_readme(filepath):
    if filepath is not None:
        with open(filepath, 'r', encoding='utf-8') as file:
            content = file.read()
        return content
    return "No file uploaded"
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            gr.Markdown("# Chatbot Interface")
            gr.Markdown("Upload your README file and interact with the chatbot.")

            # File upload
            readme_file = gr.File(label="Upload README file", type="filepath", file_types=[".md"])
            readme_content = gr.Textbox(label="README Content", lines=10, placeholder="README content will appear here...")

            # Display README content after upload
            readme_file.change(upload_readme, inputs=readme_file, outputs=readme_content)

            # Chatbot input and output
            user_input = gr.Textbox(label="Your message", placeholder="Type your message here...")
            output = gr.Textbox(label="Chatbot response", placeholder="Chatbot response will appear here...", lines=5)

            # Get chatbot response
            user_input.submit(chatbot_response, inputs=user_input, outputs=output)

demo.launch()