Update generate-dict-embeddingsXL.py
generate-dict-embeddingsXL.py
CHANGED
@@ -106,7 +106,7 @@ all_embeddings = []
 
 for word in tokendict:
     emb = embed_from_text2(word)
-    emb=emb.unsqueeze(0) # stupid matrix magic to make torch.cat work
+    #emb=emb.unsqueeze(0) # stupid matrix magic to make torch.cat work
     all_embeddings.append(emb)
     count+=1
     if (count %100) ==0:
@@ -127,6 +127,9 @@ for id in range(49405):
 embs = torch.cat(all_embeddings,dim=0)
 print("Shape of result = ",embs.shape)
 
+if len(embs.shape) != 2:
+    print("Sanity check: result is wrong shape: it wont work")
+
 print(f"Saving the calculatiuons to {outputfile}...")
 save_file({"embeddings": embs}, outputfile)
 
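For context, one reading of this change: if embed_from_text2() already returns a per-word embedding with a leading batch dimension (shape [1, dim]), the extra unsqueeze(0) turned each entry into a [1, 1, dim] tensor, so torch.cat(..., dim=0) produced a 3-D result instead of the intended 2-D [num_words, dim] matrix; the new check catches that before save_file writes the output. Below is a minimal sketch of that behaviour, not the repo's code: the embedding width (768) and the random stand-in for embed_from_text2() are assumptions for illustration.

import torch

dim = 768                      # hypothetical embedding width, not from the repo
fake_words = ["a", "b", "c"]   # stand-in for tokendict

all_embeddings = []
for _ in fake_words:
    emb = torch.randn(1, dim)  # stand-in for embed_from_text2(word), assumed [1, dim]
    # emb = emb.unsqueeze(0)   # the removed line: would make this [1, 1, dim]
    all_embeddings.append(emb)

embs = torch.cat(all_embeddings, dim=0)
print("Shape of result = ", embs.shape)  # [3, 768] without unsqueeze; [3, 1, 768] with it

# the sanity check added in this commit
if len(embs.shape) != 2:
    print("Sanity check: result is wrong shape: it wont work")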