partnersfactory committed on
Commit
4936344
1 Parent(s): 450bcc5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -1
app.py CHANGED
@@ -1,3 +1,39 @@
 
 
 
 
 
1
  import gradio as gr
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
 
3
- gr.Interface.load("models/microsoft/BioGPT-Large").launch()
 
 
 
 
 
 
 
 
 
 
 
1
import os
import gradio as gr
import torch
from transformers import pipeline

# Report GPU status at startup. Guard the device-name lookup: the original
# called torch.cuda.current_device() unconditionally, which raises on
# CPU-only machines even though availability was checked in the line above.
cuda_available = torch.cuda.is_available()
print(f"Is CUDA available: {cuda_available}")
if cuda_available:
    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")

# Prompt examples pre-filled in the demo UI.
examples = [['COVID-19 is'], ['A 65-year-old female patient with a past medical history of']]

# Text-generation pipeline. Fall back to CPU (device=-1) when no GPU is
# present; the original hard-coded "cuda:0" and crashed on CPU-only hosts.
pipe_biogpt = pipeline(
    "text-generation",
    model="microsoft/biogpt-large",
    device="cuda:0" if cuda_available else -1,
)

title = "BioGPT-Large Demo"
description = """
Check out the [BioGPT-Large model card](https://huggingface.co/microsoft/biogpt-large) for more info.
**Disclaimer:** this demo was made for research purposes only and should not be used for medical purposes.
"""
22
+
23
def inference(text):
    """Generate a BioGPT-Large continuation of *text*.

    Returns a single-element list so the result lines up with the
    Interface's one-item outputs list.
    """
    generated = pipe_biogpt(text, max_length=100)[0]["generated_text"]
    return [generated]
28
 
29
# Wire up the demo UI: one free-text prompt in, one generated-text box out.
prompt_box = gr.Textbox(lines=3)
result_boxes = [gr.Textbox(lines=3, label="BioGPT-Large")]
io = gr.Interface(
    inference,
    prompt_box,
    outputs=result_boxes,
    title=title,
    description=description,
    examples=examples,
)
io.launch()