Mbilal755 committed on
Commit
9f88fa2
1 Parent(s): 9c2fdfd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -22
app.py CHANGED
@@ -1,29 +1,16 @@
1
  import gradio as gr
2
- import json
3
- from huggingface_hub import hf_hub_download
4
 
5
- model_path = hf_hub_download(repo_id="Mbilal755/Radiology_Bart", filename="tf_model.h5")
 
6
 
7
- # Load model directly
8
- import tensorflow as tf
9
- model = tf.keras.models.load_model(model_path)
 
 
10
 
11
- # Load tokenizer
12
- tokenizer_path = hf_hub_download(repo_id="Mbilal755/Radiology_Bart", filename="tokenizer.json")
13
- with open(tokenizer_path) as f:
14
- tokenizer_data = json.load(f)
15
- tokenizer = tokenizer_data["tokenizer"]
16
-
17
- def summarize(text):
18
- inputs = tokenizer.encode(text)
19
-
20
- # Run model inference
21
- summary_ids = model.generate(inputs)
22
-
23
- summary = tokenizer.decode(summary_ids)
24
- return summary
25
-
26
  iface = gr.Interface(fn=summarize, inputs="text", outputs="text")
27
 
28
  if __name__ == "__main__":
29
- iface.launch(share=True)
 
1
import gradio as gr
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Repo on the Hugging Face Hub holding the fine-tuned BART checkpoint.
_REPO_ID = "Mbilal755/Radiology_Bart"

# Download and load the seq2seq model and its matching tokenizer once at
# startup, so every request served by the Gradio app reuses them.
model = AutoModelForSeq2SeqLM.from_pretrained(_REPO_ID)
tokenizer = AutoTokenizer.from_pretrained(_REPO_ID)
6
 
7
def summarize(text):
    """Summarize a radiology report with the fine-tuned BART model.

    Args:
        text: Free-text radiology report to condense.

    Returns:
        The generated summary as a plain string, special tokens removed.
    """
    # NOTE(review): parameter renamed from `input` (shadowed the builtin);
    # gradio calls the fn positionally, so callers are unaffected.
    # Tokenize into PyTorch tensors, as model.generate expects.
    inputs = tokenizer(text, return_tensors="pt")
    # Generate with the model's default decoding settings.
    output = model.generate(inputs["input_ids"])
    # output is a batch of token-id sequences; decode the first (only) one.
    summary = tokenizer.decode(output[0], skip_special_tokens=True)
    return summary
12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Wire the summarizer into a minimal text-in/text-out Gradio UI.
iface = gr.Interface(fn=summarize, inputs="text", outputs="text")

if __name__ == "__main__":
    # share=True also exposes a temporary public gradio.live URL.
    iface.launch(share=True)