vpj committed
Commit 9d34dbf
1 Parent(s): 3a3733c
Files changed (2):
  1. config.json +1 -0
  2. configuration_geov.py +6 -3
config.json CHANGED
@@ -17,6 +17,7 @@
   "num_hidden_layers": 32,
   "rotary_emb_base": 10000,
   "tie_word_embeddings": false,
+  "tokenizer_class": "GeoVTokenizer",
   "torch_dtype": "bfloat16",
   "transformers_version": "4.28.0.dev0",
   "use_cache": true,
configuration_geov.py CHANGED
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """ GeoV model configuration"""
-import geov.tokenization_geov
 from transformers.configuration_utils import PretrainedConfig
 from transformers.utils import logging
 
@@ -74,7 +73,6 @@ class GeoVConfig(PretrainedConfig):
     >>> configuration = model.config  # doctest: +SKIP
     ```"""
     model_type = "geov"
-    tokenizer_class = geov.tokenization_geov.GeoVTokenizer
 
     def __init__(
         self,
@@ -91,10 +89,15 @@ class GeoVConfig(PretrainedConfig):
         bos_token_id=0,
         eos_token_id=2,
         tie_word_embeddings=False,
+        tokenizer_class="GeoVTokenizer",
         **kwargs,
     ):
         super().__init__(
-            bos_token_id=bos_token_id, eos_token_id=eos_token_id, tie_word_embeddings=tie_word_embeddings, **kwargs
+            bos_token_id=bos_token_id,
+            eos_token_id=eos_token_id,
+            tie_word_embeddings=tie_word_embeddings,
+            tokenizer_class=tokenizer_class,
+            **kwargs
         )
         self.vocab_size = vocab_size
         self.max_position_embeddings = max_position_embeddings
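
Net effect of the Python change: the module-level import of geov.tokenization_geov is gone, and the tokenizer class now travels as a plain string through PretrainedConfig, which stores and serializes it; that is what produces the new config.json key above. A minimal sketch under that assumption (the top-level import path of configuration_geov and the output directory are illustrative):

    # Sketch only: PretrainedConfig.__init__ accepts `tokenizer_class` as a
    # kwarg and keeps it as a plain string attribute, so no geov import is
    # needed in this module.
    from configuration_geov import GeoVConfig  # assumes the file is on sys.path

    config = GeoVConfig()  # picks up the default tokenizer_class="GeoVTokenizer"
    assert config.tokenizer_class == "GeoVTokenizer"

    # save_pretrained() writes the attribute into config.json, matching the
    # "tokenizer_class": "GeoVTokenizer" entry added in this commit.
    config.save_pretrained("./geov-config")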