katielink committed on
Commit
1d9ca95
1 Parent(s): c40b81d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -5
app.py CHANGED
@@ -4,26 +4,28 @@ import torch
4
  import numpy as np
5
  from transformers import pipeline
6
 
7
- name_list = ['microsoft/biogpt', 'stanford-crfm/BioMedLM']
8
 
9
  examples = [['COVID-19 is'],['A 65-year-old female patient with a past medical history of']]
10
 
11
- import torch
12
  print(f"Is CUDA available: {torch.cuda.is_available()}")
13
  print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
14
 
15
  pipe_biogpt = pipeline("text-generation", model="microsoft/biogpt")
16
  pipe_biomedlm = pipeline("text-generation", model="stanford-crfm/BioMedLM", device="cuda:0")
 
17
 
18
  title = "Compare generative biomedical LLMs!"
19
- description = "This demo compares [BioGPT](https://huggingface.co/microsoft/biogpt) and [BioMedLM](https://huggingface.co/stanford-crfm/BioMedLM). **Disclaimer:** this demo was made for research purposes only and should not be used for medical purposes."
20
 
21
  def inference(text):
22
  output_biogpt = pipe_biogpt(text, max_length=100)[0]["generated_text"]
23
  output_biomedlm = pipe_biomedlm(text, max_length=100)[0]["generated_text"]
 
24
  return [
25
  output_biogpt,
26
- output_biomedlm
 
27
  ]
28
 
29
  io = gr.Interface(
@@ -31,7 +33,8 @@ io = gr.Interface(
31
  gr.Textbox(lines=3),
32
  outputs=[
33
  gr.Textbox(lines=3, label="BioGPT"),
34
- gr.Textbox(lines=3, label="BioMedLM")
 
35
  ],
36
  title=title,
37
  description=description,
 
4
  import numpy as np
5
  from transformers import pipeline
6
 
7
+ name_list = ['microsoft/biogpt', 'stanford-crfm/BioMedLM', 'facebook/galactica-1.3b']
8
 
9
  examples = [['COVID-19 is'],['A 65-year-old female patient with a past medical history of']]
10
 
 
11
  print(f"Is CUDA available: {torch.cuda.is_available()}")
12
  print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
13
 
14
  pipe_biogpt = pipeline("text-generation", model="microsoft/biogpt")
15
  pipe_biomedlm = pipeline("text-generation", model="stanford-crfm/BioMedLM", device="cuda:0")
16
+ pipe_galactica = pipeline("text-generation", model="facebook/galactica-1.3b", device="cuda:0")
17
 
18
  title = "Compare generative biomedical LLMs!"
19
+ description = "**Disclaimer:** this demo was made for research purposes only and should not be used for medical purposes."
20
 
21
  def inference(text):
22
  output_biogpt = pipe_biogpt(text, max_length=100)[0]["generated_text"]
23
  output_biomedlm = pipe_biomedlm(text, max_length=100)[0]["generated_text"]
24
+ output_galactica = pipe_galactica(text, max_length=100)[0]["generated_text"]
25
  return [
26
  output_biogpt,
27
+ output_biomedlm,
28
+ output_galactica
29
  ]
30
 
31
  io = gr.Interface(
 
33
  gr.Textbox(lines=3),
34
  outputs=[
35
  gr.Textbox(lines=3, label="BioGPT"),
36
+ gr.Textbox(lines=3, label="BioMedLM (fka PubmedGPT)")
37
+ gr.Textbox(lines=3, label="Galactica")
38
  ],
39
  title=title,
40
  description=description,