# Gradio demo comparing text generation from three biomedical language models:
# BioGPT-Large, BioMedLM, and Galactica 1.3B.
import gradio as gr
import torch
from transformers import pipeline

examples = [['COVID-19 is'],['A 65-year-old female patient with a past medical history of']] 

print(f"Is CUDA available: {torch.cuda.is_available()}")
print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")

pipe_biogpt = pipeline("text-generation", model="microsoft/BioGPT-Large", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
pipe_biomedlm = pipeline("text-generation", model="stanford-crfm/BioMedLM", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})
pipe_galactica = pipeline("text-generation", model="facebook/galactica-1.3b", device="cuda:0", model_kwargs={"torch_dtype":torch.bfloat16})

title = "Compare generative biomedical LLMs!"
description = "**Disclaimer:** this demo was made for research purposes only and should not be used for medical purposes."

def inference(text):
  """Run the same prompt through all three models and return their generations."""
  # max_length counts the prompt plus generated tokens, so each output is capped at 100 tokens total.
  output_biogpt = pipe_biogpt(text, max_length=100)[0]["generated_text"]
  output_biomedlm = pipe_biomedlm(text, max_length=100)[0]["generated_text"]
  output_galactica = pipe_galactica(text, max_length=100)[0]["generated_text"]
  return [
      output_biogpt, 
      output_biomedlm,
      output_galactica
  ]

io = gr.Interface(
  fn=inference,
  inputs=gr.Textbox(lines=3, label="Prompt"),
  outputs=[
    gr.Textbox(lines=3, label="BioGPT-Large"),
    gr.Textbox(lines=3, label="BioMedLM (fka PubmedGPT)"),
    gr.Textbox(lines=3, label="Galactica 1.3B"),
  ],
  title=title,
  description=description,
  examples=examples
)
io.launch()