import gradio as gr
from transformers import AutoTokenizer, TFAutoModelForSeq2SeqLM

# Load the model and tokenizer directly from the Hub.
# tf_model.h5 is a weights-only transformers checkpoint, so it is loaded
# through from_pretrained rather than tf.keras.models.load_model; the same
# call also resolves and caches the download, replacing hf_hub_download.
model = TFAutoModelForSeq2SeqLM.from_pretrained("Mbilal755/Radiology_Bart")
tokenizer = AutoTokenizer.from_pretrained("Mbilal755/Radiology_Bart")

def summarize(text):
    # Tokenize the input report into TensorFlow tensors.
    inputs = tokenizer(text, return_tensors="tf", truncation=True)

    # Run seq2seq generation to produce the summary token ids.
    summary_ids = model.generate(**inputs)

    # Decode the first generated sequence back into plain text.
    summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
    return summary

iface = gr.Interface(fn=summarize, inputs="text", outputs="text")

if __name__ == "__main__":
    iface.launch(share=True)