Update model.py
model.py
CHANGED
@@ -9,26 +9,7 @@ import torch
 import torch.nn as nn
 import transformers
 
-
-class ContextualModelConfig(transformers.configuration_utils.PretrainedConfig):
-    """We create a dummy configuration class that will just set properties
-    based on whatever kwargs we pass in.
-
-    When this class is initialized (see experiments.py) we pass in the
-    union of all data, model, and training args, all of which should
-    get saved to the config json.
-    """
-
-    def __init__(self, **kwargs):
-        for key, value in kwargs.items():
-            try:
-                json.dumps(value)
-                setattr(self, key, value)
-            except TypeError:
-                # value was not JSON-serializable, skip
-                continue
-        super().__init__()
-
+from misc import ContextualModelConfig
 
 def load_embedder_and_tokenizer(name: str) -> Tuple[
     transformers.PreTrainedModel,