Jitin committed
Commit da7f539
1 Parent(s): 14338c7

LM iteration of manglish config

Files changed (2):
  1. config.json +5 -2
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,5 +1,9 @@
 {
-  "_name_or_path": "bert-base-multilingual-cased",
+  "_name_or_path": "drive/My Drive/Colab Notebooks/ML-Model",
+  "adapters": {
+    "adapters": {},
+    "config_map": {}
+  },
   "architectures": [
     "RobertaForMaskedLM"
   ],
@@ -18,7 +22,6 @@
   "num_attention_heads": 12,
   "num_hidden_layers": 6,
   "pad_token_id": 1,
-  "position_embedding_type": "absolute",
   "type_vocab_size": 1,
   "vocab_size": 52000
 }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7add32cb8af632307ff65f68c4d5380a92459dfde9ceb518a0f42c92ed5f8c32
-size 334066395
+oid sha256:3fa47793c7c2681f618c711c5a85ee8109036d12600ecffac10fca123ddd5f60
+size 334064923
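
For context, a config.json in this shape is what the Transformers library reads when loading the checkpoint, and the nested "adapters" block suggests the adapter-transformers fork may have been used during training. Below is a minimal loading sketch; the model directory path is a placeholder (the published model id is not part of this commit), and nothing else in the snippet is taken from the commit itself.

from transformers import RobertaConfig, RobertaForMaskedLM

# Placeholder path: point this at the directory containing config.json and pytorch_model.bin.
model_dir = "path/to/manglish-model"

# The config carries the architecture hyperparameters from config.json
# (6 hidden layers, 12 attention heads, vocab_size 52000).
config = RobertaConfig.from_pretrained(model_dir)
model = RobertaForMaskedLM.from_pretrained(model_dir, config=config)

print(config.vocab_size)  # 52000, per the config.json diff above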