Spaces:
Running
Fix loading issues in the App
Browse files
app.py
CHANGED
@@ -1,15 +1,17 @@
|
|
1 |
-
import streamlit as st
|
2 |
-
import pandas as pd
|
3 |
-
import numpy as np
|
4 |
import os
|
|
|
5 |
import matplotlib.pyplot as plt
|
|
|
|
|
|
|
6 |
from transformers import CLIPProcessor
|
|
|
7 |
from medclip.modeling_hybrid_clip import FlaxHybridCLIP
|
8 |
|
9 |
|
10 |
@st.cache(allow_output_mutation=True)
|
11 |
def load_model():
|
12 |
-
model = FlaxHybridCLIP.from_pretrained("flax-community/medclip-roco")
|
13 |
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
|
14 |
return model, processor
|
15 |
|
|
|
|
|
|
|
|
|
1 |
import os
|
2 |
+
|
3 |
import matplotlib.pyplot as plt
|
4 |
+
import numpy as np
|
5 |
+
import pandas as pd
|
6 |
+
import streamlit as st
|
7 |
from transformers import CLIPProcessor
|
8 |
+
|
9 |
from medclip.modeling_hybrid_clip import FlaxHybridCLIP
|
10 |
|
11 |
|
12 |
@st.cache(allow_output_mutation=True)
def load_model():
    """Load the MedCLIP model and its paired CLIP processor, cached by Streamlit.

    Returns:
        tuple: ``(model, processor)`` where ``model`` is the result of
        ``FlaxHybridCLIP.from_pretrained`` for ``flax-community/medclip-roco``
        and ``processor`` is the ``CLIPProcessor`` for
        ``openai/clip-vit-base-patch32``. Cached across Streamlit reruns via
        ``st.cache(allow_output_mutation=True)``.
    """
    # NOTE(review): _do_init=False defers Flax parameter initialization at
    # load time — presumably the "loading issues" fix this commit introduces;
    # confirm downstream code handles the object returned in this mode.
    processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
    model = FlaxHybridCLIP.from_pretrained("flax-community/medclip-roco", _do_init=False)
    return model, processor
|
17 |
|