Mbilal755 committed on
Commit
ef67cf9
1 Parent(s): afecea7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -9
app.py CHANGED
@@ -1,20 +1,28 @@
1
  import gradio as gr
2
- import torch
3
  from huggingface_hub import hf_hub_download
4
 
5
- model_path = hf_hub_download(repo_id="Mbilal755/Radiology_Bart", filename="pytorch_model.bin")
6
- model = torch.load(model_path)
 
 
 
7
 
8
  # Load tokenizer
9
- tokenizer = hf_hub_download(repo_id="Mbilal755/Radiology_Bart", filename="tokenizer.json")
10
- tokenizer = tokenizer.tokenizer
 
 
11
 
12
  def summarize(text):
13
- inputs = tokenizer(text, return_tensors="pt")
14
- summary_ids = model.generate(inputs["input_ids"])
15
- summary = tokenizer.decode(summary_ids[0])
 
 
 
16
  return summary
17
-
18
  iface = gr.Interface(fn=summarize, inputs="text", outputs="text")
19
 
20
  if __name__ == "__main__":
 
1
  import gradio as gr
2
+ import json
3
  from huggingface_hub import hf_hub_download
4
 
5
# Download the model weights for Mbilal755/Radiology_Bart from the Hub.
model_path = hf_hub_download(repo_id="Mbilal755/Radiology_Bart", filename="tf_model.h5")

# Load model directly
import tensorflow as tf  # NOTE(review): prefer moving this into the top-of-file import block
# NOTE(review): a tf_model.h5 published by `transformers` normally contains weights
# only, not a full Keras SavedModel — load_model may fail here; confirm the file is
# actually a complete serialized model.
model = tf.keras.models.load_model(model_path)

# Load tokenizer
tokenizer_path = hf_hub_download(repo_id="Mbilal755/Radiology_Bart", filename="tokenizer.json")
# tokenizer.json is UTF-8; pin the encoding so parsing does not depend on the
# platform's locale default (which is not UTF-8 on e.g. Windows).
with open(tokenizer_path, encoding="utf-8") as f:
    tokenizer_data = json.load(f)
# NOTE(review): this binds `tokenizer` to a plain dict parsed from JSON; a dict has
# no .encode()/.decode() methods, so summarize() will raise AttributeError at call
# time. The proper fix is tokenizers.Tokenizer.from_file(tokenizer_path) — confirm
# and add that dependency.
tokenizer = tokenizer_data["tokenizer"]
16
 
17
def summarize(text):
    """Summarize *text* with the module-level ``model`` and ``tokenizer``.

    Parameters
    ----------
    text : str
        The input (radiology report) text to summarize.

    Returns
    -------
    str
        The decoded summary produced by the model.
    """
    # NOTE(review): `tokenizer` is assigned from json.load() above, i.e. a plain
    # dict — .encode()/.decode() will raise AttributeError until a real tokenizer
    # object is loaded; confirm upstream loading.
    inputs = tokenizer.encode(text)

    # Run model inference.
    summary_ids = model.generate(inputs)

    # generate() yields a batch of sequences; decode only the first one.
    # (Decoding the whole batch hands a nested sequence to the tokenizer,
    # which the previous revision avoided via summary_ids[0].)
    summary = tokenizer.decode(summary_ids[0])
    return summary
25
+
26
  iface = gr.Interface(fn=summarize, inputs="text", outputs="text")
27
 
28
  if __name__ == "__main__":