LR36 committed on
Commit 1bba206 · verified · 1 Parent(s): 729dcdf

Create app.py

Files changed (1)
  1. app.py +42 -0
app.py ADDED
@@ -0,0 +1,42 @@
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+ from datasets import load_dataset
+ import pandas as pd
+
+ # Load model and tokenizer
+ model_name = "google/flan-t5-small"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
+
+ # Load CSV data (replace with your dataset logic)
+ def load_data():
+     dataset = load_dataset("csv", data_files={"train": "train.csv"})  # Adjust path
+     return dataset["train"]
+
+ # Generate text (inference)
+ def generate_text(prompt):
+     inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=512)
+     outputs = model.generate(**inputs, max_new_tokens=100)
+     return tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+ # Fine-tune button (simplified example)
+ def fine_tune():
+     dataset = load_data()
+     # Add your training logic here (see previous examples)
+     return "Fine-tuning complete! (Note: Models reset when Space stops.)"
+
+ # Gradio UI
+ with gr.Blocks() as demo:
+     gr.Markdown("# FLAN-T5 Demo")
+     with gr.Tab("Generate Text"):
+         prompt = gr.Textbox(label="Input Prompt")
+         generate_btn = gr.Button("Generate")
+         output = gr.Textbox(label="Output")
+         generate_btn.click(fn=generate_text, inputs=prompt, outputs=output)
+
+     with gr.Tab("Fine-Tune"):
+         train_btn = gr.Button("Train on CSV Data")
+         train_output = gr.Textbox(label="Training Status")
+         train_btn.click(fn=fine_tune, outputs=train_output)
+
+ demo.launch()
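
The fine_tune() handler added in this commit is only a stub. A minimal sketch of what its training logic could look like with Hugging Face's Seq2SeqTrainer is shown below; it reuses the tokenizer, model, and load_data() defined in app.py, and it assumes train.csv has "input" and "target" text columns. The column names, output directory, and hyperparameters are assumptions for illustration, not part of this commit.

# Hypothetical replacement for the fine_tune() stub; relies on the
# tokenizer, model, and load_data() already defined in app.py.
from transformers import (
    DataCollatorForSeq2Seq,
    Seq2SeqTrainer,
    Seq2SeqTrainingArguments,
)

def tokenize_batch(batch):
    # "input" / "target" column names are assumptions about train.csv.
    model_inputs = tokenizer(batch["input"], max_length=512, truncation=True)
    labels = tokenizer(text_target=batch["target"], max_length=128, truncation=True)
    model_inputs["labels"] = labels["input_ids"]
    return model_inputs

def fine_tune():
    dataset = load_data()
    tokenized = dataset.map(tokenize_batch, batched=True,
                            remove_columns=dataset.column_names)
    args = Seq2SeqTrainingArguments(
        output_dir="flan-t5-finetuned",  # written to ephemeral Space storage
        per_device_train_batch_size=8,
        num_train_epochs=1,
        learning_rate=5e-5,
        logging_steps=10,
    )
    trainer = Seq2SeqTrainer(
        model=model,
        args=args,
        train_dataset=tokenized,
        data_collator=DataCollatorForSeq2Seq(tokenizer, model=model),
    )
    trainer.train()
    return "Fine-tuning complete! (Note: Models reset when Space stops.)"

As the stub's return message notes, anything trained this way lives only in the Space's ephemeral storage and is lost when the Space restarts unless the updated model is pushed to the Hub.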