kamranshah committed on
Commit
3650a76
1 Parent(s): e808242

Create app.py

Files changed (1)
  1. app.py +22 -0
app.py ADDED
@@ -0,0 +1,22 @@
+ from transformers import T5ForConditionalGeneration, T5Tokenizer
+
+ # Load the pretrained T5 model and its tokenizer
+ model_name = "t5-small"
+ model = T5ForConditionalGeneration.from_pretrained(model_name)
+ tokenizer = T5Tokenizer.from_pretrained(model_name)
+
+ # Input text (adjacent string literals inside the parentheses are concatenated into one string)
+ input_text = ("LLMs are pre-trained on a massive amount of data. "
+               "They are extremely flexible because they can be trained to perform a variety of tasks, "
+               "such as text generation, summarization, and translation. "
+               "They are also scalable because they can be fine-tuned to specific tasks, which can improve their performance.")
+ # Prefix the input with a prompt so T5 knows this is a summarization task
+ prompt = "summarize: " + input_text
+
+ # Tokenize the prompt and generate the summary with beam search
+ inputs = tokenizer.encode(prompt, return_tensors="pt", max_length=512, truncation=True)
+ summary_ids = model.generate(inputs, max_length=150, min_length=50, length_penalty=2.0, num_beams=4, early_stopping=True)
+ summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
+
+ print("Summary:")
+ print(summary)