Commit efa8dd7
Parent(s): 4cea813

feat: upload installed model

Browse files
- .gitignore +0 -1
- artifacts/model-ql03493w:v3/model.ckpt +3 -0
- model.py +9 -9
.gitignore
CHANGED
@@ -1,2 +1 @@
-artifacts/
 wandb/
artifacts/model-ql03493w:v3/model.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d182f833fd1de144a5d6346891697597362fbca2c8198b9baa71690cb014ff80
+size 2583633897
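The file added above is a Git LFS pointer: the repository stores only this three-line stub (spec version, sha256 oid, and byte size), while the actual ~2.6 GB checkpoint lives in LFS storage. Purely as an illustration of the format, a pointer like this can be read as simple key-value pairs (the parse_lfs_pointer helper below is hypothetical, not part of the repo):

# Illustrative only: read the key-value fields of a Git LFS pointer file.
def parse_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

pointer = parse_lfs_pointer("artifacts/model-ql03493w:v3/model.ckpt")
print(pointer["oid"])   # sha256:d182f833...
print(pointer["size"])  # 2583633897 bytes (~2.6 GB)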
model.py
CHANGED
@@ -88,17 +88,17 @@ class ImageCaptioner(pl.LightningModule):
 
         return out
 
-@st.cache_resource
-def download_trained_model():
-    wandb.init(anonymous="must")
+# @st.cache_resource
+# def download_trained_model():
+# wandb.init(anonymous="must")
 
-    api = wandb.Api()
-    artifact = api.artifact('hungchiehwu/CLIP-L14_GPT/model-ql03493w:v3')
-    artifact_dir = artifact.download()
+# api = wandb.Api()
+# artifact = api.artifact('hungchiehwu/CLIP-L14_GPT/model-ql03493w:v3')
+# artifact_dir = artifact.download()
 
-    wandb.finish()
+# wandb.finish()
 
-    return artifact_dir
+# return artifact_dir
 
 @st.cache_resource
 def load_clip_model():
@@ -120,7 +120,7 @@ def load_gpt_model():
 def load_model():
 
     # # Load fine-tuned model from wandb
-    artifact_dir = download_trained_model()
+    artifact_dir = "./artifacts/model-ql03493w:v3"
     PATH = f"{artifact_dir[2:]}/model.ckpt"
 
     # Load pretrained GPT, CLIP model from OpenAI