Committed by wenkai
Commit aa13001
1 Parent(s): 72b0e49

Update app.py

Files changed (1): app.py (+5, -12)
app.py CHANGED
@@ -2,29 +2,22 @@ import gradio as gr
  from transformers import AutoProcessor, AutoModelForCausalLM
  import spaces
  import torch.nn.functional as F
- import requests
  import copy
  import torch
- from PIL import Image, ImageDraw, ImageFont
- import io
- import matplotlib.pyplot as plt
- import matplotlib.patches as patches

  import random
  import numpy as np
  from esm import pretrained, FastaBatchedDataset


+ def get_model(model_id):
+     a, b = pretrained.load_model_and_alphabet(model_id.split('/')[1])
+     return (a, b)
+
  models = {
-     'facebook/esm2_t36_3B_UR50D': pretrained.load_model_and_alphabet('esm2_t36_3B_UR50D').to("cuda").eval(),
+     'facebook/esm2_t36_3B_UR50D': get_model('facebook/esm2_t36_3B_UR50D'),
  }

- processors = {
-     'microsoft/Florence-2-large-ft': AutoProcessor.from_pretrained('microsoft/Florence-2-large-ft', trust_remote_code=True),
-     'microsoft/Florence-2-large': AutoProcessor.from_pretrained('microsoft/Florence-2-large', trust_remote_code=True),
-     'microsoft/Florence-2-base-ft': AutoProcessor.from_pretrained('microsoft/Florence-2-base-ft', trust_remote_code=True),
-     'microsoft/Florence-2-base': AutoProcessor.from_pretrained('microsoft/Florence-2-base', trust_remote_code=True),
- }


  DESCRIPTION = "Esm2 embedding"
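For context, a minimal sketch (not part of the commit) of how the (model, alphabet) pair returned by get_model could be consumed to compute ESM2 embeddings with the standard fair-esm API. Unlike the old code, get_model no longer moves the model to CUDA or switches it to eval mode, so that would now be the caller's responsibility; the sequence, layer index, and device below are illustrative assumptions.

import torch

# Unpack the lazily loaded (model, alphabet) tuple stored in `models`
model, alphabet = models['facebook/esm2_t36_3B_UR50D']
model = model.to("cuda").eval()  # no longer done at load time; assumes a CUDA device is available

# Tokenize a protein sequence with the alphabet's batch converter
batch_converter = alphabet.get_batch_converter()
labels, strs, tokens = batch_converter([("query", "MKTAYIAKQRQISFVKSHFSRQLEERLGLIEVQ")])
tokens = tokens.to("cuda")

with torch.no_grad():
    # esm2_t36_3B_UR50D has 36 transformer layers; request the final layer's representations
    out = model(tokens, repr_layers=[36], return_contacts=False)

# Per-residue embeddings, dropping the BOS/EOS tokens (shape: [seq_len, 2560] for the 3B model)
embeddings = out["representations"][36][0, 1:-1]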