SiddharthaM committed on
Commit 42fe1dc
1 Parent(s): 58813f3

Training in progress, epoch 1

config.json CHANGED
@@ -1,43 +1,34 @@
 {
-  "_name_or_path": "microsoft/mdeberta-v3-base",
+  "_name_or_path": "distilbert-base-multilingual-cased",
+  "activation": "gelu",
   "architectures": [
-    "DebertaV2ForSequenceClassification"
+    "DistilBertForSequenceClassification"
   ],
-  "attention_probs_dropout_prob": 0.1,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.1,
-  "hidden_size": 768,
+  "attention_dropout": 0.1,
+  "dim": 768,
+  "dropout": 0.1,
+  "hidden_dim": 3072,
   "id2label": {
     "0": "0",
     "1": "1"
   },
   "initializer_range": 0.02,
-  "intermediate_size": 3072,
   "label2id": {
     "0": "0",
     "1": "1"
   },
-  "layer_norm_eps": 1e-07,
   "max_position_embeddings": 512,
-  "max_relative_positions": -1,
-  "model_type": "deberta-v2",
-  "norm_rel_ebd": "layer_norm",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
+  "model_type": "distilbert",
+  "n_heads": 12,
+  "n_layers": 6,
+  "output_past": true,
   "pad_token_id": 0,
-  "pooler_dropout": 0,
-  "pooler_hidden_act": "gelu",
-  "pooler_hidden_size": 768,
-  "pos_att_type": [
-    "p2c",
-    "c2p"
-  ],
-  "position_biased_input": false,
-  "position_buckets": 256,
-  "relative_attention": true,
-  "share_att_key": true,
+  "problem_type": "single_label_classification",
+  "qa_dropout": 0.1,
+  "seq_classif_dropout": 0.2,
+  "sinusoidal_pos_embds": false,
+  "tie_weights_": true,
   "torch_dtype": "float32",
   "transformers_version": "4.24.0.dev0",
-  "type_vocab_size": 0,
-  "vocab_size": 251000
+  "vocab_size": 119547
 }
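
The change retargets the checkpoint from microsoft/mdeberta-v3-base to distilbert-base-multilingual-cased: half the depth (6 layers instead of 12), a much smaller vocabulary (119,547 instead of 251,000), and the same two-label classification head. A minimal sanity-check sketch, assuming the repository has been cloned locally and the script runs from its root (the path "." is an assumption; the Hub repo id is not shown in this diff):

# Sanity-check the updated config from a local clone of this repository.
from transformers import AutoConfig, AutoModelForSequenceClassification

config = AutoConfig.from_pretrained(".")   # assumption: run from the repo root
assert config.model_type == "distilbert"
assert config.n_layers == 6 and config.n_heads == 12 and config.dim == 768
assert config.num_labels == 2              # id2label maps 0 -> "0", 1 -> "1"
assert config.vocab_size == 119547

# Loading the model from the same directory picks up pytorch_model.bin
# (fetch the LFS payload first, e.g. with `git lfs pull`).
model = AutoModelForSequenceClassification.from_pretrained(".")
print(model.config.problem_type)           # single_label_classification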
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8471a9e6719f741d86186005c6f594947e101e22ac064cce6f289bff09d2d5a5
-size 1115315755
+oid sha256:4b6bf6bca8884d129db65a1a0db0f9150846b8cd43db8c7c3db05c20665b6f24
+size 541339121
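
The new weight file is roughly half the size of the old one (541,339,121 bytes versus 1,115,315,755), consistent with the smaller architecture: at float32, 4 bytes per parameter, that is about 135M parameters. A quick check, assuming `git lfs pull` has already materialised pytorch_model.bin locally:

# Count parameters in the serialized checkpoint (assumes the LFS payload is present locally).
import torch

state_dict = torch.load("pytorch_model.bin", map_location="cpu")
n_params = sum(t.numel() for t in state_dict.values())
print(f"{n_params / 1e6:.1f}M parameters")  # roughly 135M for distilbert-base-multilingual-cased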
runs/Dec19_23-12-21_teesta/events.out.tfevents.1671473691.teesta.2078.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c8503a6984f7d2eec39d0d17cb13229249d25f23c812a4d95c8e01e0970dae5f
+size 512
runs/Dec21_18-56-11_teesta/1671629182.092958/events.out.tfevents.1671629182.teesta.16256.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:267da261893291b8928af9af38f56832d8d37425b536cde86f4b29dabeb3f1a4
+size 5471
runs/Dec21_18-56-11_teesta/events.out.tfevents.1671629182.teesta.16256.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:68fed457d040af7ad6c0fb74a9803be11603bf18ce947478df757a3802e79e9b
+size 4196
special_tokens_map.json CHANGED
@@ -1,7 +1,5 @@
 {
-  "bos_token": "[CLS]",
   "cls_token": "[CLS]",
-  "eos_token": "[SEP]",
   "mask_token": "[MASK]",
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5b128f3eb3c16ebe674b34492da141a26ed841f594763465ade051b15d71650c
-size 16331456
+oid sha256:a4a4fa4b10a2a40361529954532f10257fda53cb22177554a862bcc661b7e4d8
+size 2919615
tokenizer_config.json CHANGED
@@ -1,16 +1,14 @@
 {
-  "bos_token": "[CLS]",
   "cls_token": "[CLS]",
   "do_lower_case": false,
-  "eos_token": "[SEP]",
   "mask_token": "[MASK]",
-  "name_or_path": "microsoft/mdeberta-v3-base",
+  "model_max_length": 512,
+  "name_or_path": "distilbert-base-multilingual-cased",
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
-  "sp_model_kwargs": {},
   "special_tokens_map_file": null,
-  "split_by_punct": false,
-  "tokenizer_class": "DebertaV2Tokenizer",
-  "unk_token": "[UNK]",
-  "vocab_type": "spm"
+  "strip_accents": null,
+  "tokenize_chinese_chars": true,
+  "tokenizer_class": "DistilBertTokenizer",
+  "unk_token": "[UNK]"
 }
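
The tokenizer moves from the SentencePiece-based DebertaV2Tokenizer to the WordPiece-based DistilBertTokenizer, drops the bos/eos aliases, and sets model_max_length to 512. A minimal sketch for checking the retargeted tokenizer, again assuming a local clone and the path "." (an assumption; the repo id is not shown here):

# Load the updated tokenizer from a local clone of this repository.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".")  # assumption: run from the repo root
print(type(tokenizer).__name__)       # DistilBertTokenizerFast (fast variant of DistilBertTokenizer)
print(tokenizer.model_max_length)     # 512
print(tokenizer.cls_token, tokenizer.sep_token, tokenizer.unk_token)  # [CLS] [SEP] [UNK]

enc = tokenizer("Training in progress, epoch 1", truncation=True, max_length=512)
print(enc["input_ids"][:5])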
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a5719e58318244555db48767abed63b2448d6eefe80ec1db64c2ae0f9bbdb06f
+oid sha256:f81633a5880f247c7c04777ce9b5da37285a60d0b20cec19f8233da1107ad0af
 size 3375
vocab.txt ADDED
The diff for this file is too large to render.