MuhammedSaeed committed
Commit 3da8607
1 Parent(s): e175f8b

Upload 4 files


Knowledge distillation of a BERT language model for Arabic: this commit adds the student's config, training parameters, and model weights. The teacher model used to train the student is `asafaya/bert-large-arabic`.
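A minimal loading sketch, assuming a local clone of this repo at `./` (the repo id itself is not shown on this page). Since the commit ships no tokenizer files, the teacher's tokenizer is assumed to be the compatible one (the student's `vocab_size` of 32000 matches it):

```python
# Minimal loading sketch -- "./" is a hypothetical local clone of this repo.
from transformers import AutoTokenizer, BertModel, DistilBertModel

# Teacher named in the commit message.
teacher = BertModel.from_pretrained("asafaya/bert-large-arabic")

# Distilled student uploaded in this commit (config.json + pytorch_model.bin).
student = DistilBertModel.from_pretrained("./")

# No tokenizer ships with this commit; the teacher's tokenizer is assumed
# to be the compatible one (the student's vocab_size of 32000 matches).
tokenizer = AutoTokenizer.from_pretrained("asafaya/bert-large-arabic")

inputs = tokenizer("مرحبا بالعالم", return_tensors="pt")
hidden = student(**inputs).last_hidden_state  # shape: (1, seq_len, 1024)
```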

Files changed (4)
  1. config.json +21 -0
  2. git_log.json +5 -0
  3. parameters.json +51 -0
  4. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,21 @@
+ {
+ "_name_or_path": "/local/musaeed/UofkDistill/model/checkpoint.pth",
+ "activation": "gelu",
+ "attention_dropout": 0.1,
+ "dim": 1024,
+ "dropout": 0.1,
+ "hidden_dim": 4096,
+ "initializer_range": 0.02,
+ "max_position_embeddings": 512,
+ "model_type": "distilbert",
+ "n_heads": 16,
+ "n_layers": 6,
+ "output_hidden_states": true,
+ "pad_token_id": 0,
+ "qa_dropout": 0.1,
+ "seq_classif_dropout": 0.2,
+ "sinusoidal_pos_embds": true,
+ "tie_weights_": true,
+ "transformers_version": "4.24.0",
+ "vocab_size": 32000
+ }
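This config defines a 6-layer, 16-head DistilBERT that keeps the teacher's width (hidden size 1024, FFN size 4096) over a 32,000-token vocabulary, with sinusoidal (non-learned) position embeddings. A minimal sketch, assuming a local copy of this `config.json`, that rebuilds the architecture and counts its weights:

```python
from transformers import DistilBertConfig, DistilBertModel

# Rebuild the (randomly initialized) student skeleton from the file above:
# 6 layers x 16 heads, dim 1024, hidden_dim 4096 -- half the depth of
# bert-large-arabic at the same width.
config = DistilBertConfig.from_json_file("config.json")
model = DistilBertModel(config)
print(f"{model.num_parameters():,} parameters")
```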
git_log.json ADDED
@@ -0,0 +1,5 @@
+ {
+ "repo_id": "<git.Repo \"/local/musaeed/UofkDistill/transformers/.git\">",
+ "repo_sha": "699e90437f984d69ad3c9b891dd2e9d0fc2cffe4",
+ "repo_branch": "main"
+ }
parameters.json ADDED
@@ -0,0 +1,51 @@
+ {
+ "force": true,
+ "dump_path": "/local/musaeed/UofkDistill/Dumps/",
+ "data_file": "/local/musaeed/UofkDistill/transformers/examples/research_projects/distillation/The_data/merged_data_binarized.pickle",
+ "student_type": "distilbert",
+ "student_config": "/local/musaeed/UofkDistill/transformers_/transformers/examples/research_projects/distillation/training_configs/distilbert-base-uncased.json",
+ "student_pretrained_weights": "/local/musaeed/UofkDistill/model/checkpoint.pth",
+ "teacher_type": "bert",
+ "teacher_name": "asafaya/bert-large-arabic",
+ "temperature": 2.0,
+ "alpha_ce": 5.0,
+ "alpha_mlm": 2.0,
+ "alpha_clm": 0.0,
+ "alpha_mse": 0.0,
+ "alpha_cos": 1.0,
+ "mlm": true,
+ "mlm_mask_prop": 0.15,
+ "word_mask": 0.8,
+ "word_keep": 0.1,
+ "word_rand": 0.1,
+ "mlm_smoothing": 0.7,
+ "token_counts": "/local/musaeed/UofkDistill/transformers/examples/research_projects/distillation/The_data/merged_token_count.pickle",
+ "restrict_ce_to_mask": false,
+ "freeze_pos_embs": true,
+ "freeze_token_type_embds": false,
+ "n_epoch": 3,
+ "batch_size": 16,
+ "group_by_size": true,
+ "gradient_accumulation_steps": 50,
+ "warmup_prop": 0.05,
+ "weight_decay": 0.0,
+ "learning_rate": 3e-05,
+ "adam_epsilon": 1e-06,
+ "max_grad_norm": 5.0,
+ "initializer_range": 0.02,
+ "fp16": false,
+ "fp16_opt_level": "O1",
+ "n_gpu": 4,
+ "local_rank": 0,
+ "seed": 56,
+ "log_interval": 500,
+ "checkpoint_interval": 10000,
+ "world_size": 4,
+ "n_gpu_per_node": 4,
+ "global_rank": 0,
+ "n_nodes": 1,
+ "node_id": 0,
+ "multi_gpu": true,
+ "is_master": true,
+ "multi_node": false
+ }
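These are the run arguments of the Hugging Face `examples/research_projects/distillation/train.py` script (the `student_config` and `data_file` paths point into that project). With `alpha_clm` and `alpha_mse` at 0.0, the weights imply a three-term objective; the sketch below is illustrative plumbing, not the script's exact code:

```python
import torch
import torch.nn.functional as F

def distillation_loss(s_logits, t_logits, s_hidden, t_hidden, labels,
                      temperature=2.0, alpha_ce=5.0, alpha_mlm=2.0, alpha_cos=1.0):
    """Weighted objective implied by parameters.json (alpha_clm and
    alpha_mse are 0.0, so those terms drop out)."""
    # Soft-target KL between teacher and student, scaled by T^2 (alpha_ce).
    loss_ce = F.kl_div(
        F.log_softmax(s_logits / temperature, dim=-1),
        F.softmax(t_logits / temperature, dim=-1),
        reduction="batchmean",
    ) * temperature ** 2
    # Hard-label masked-LM cross-entropy (alpha_mlm); -100 marks unmasked tokens.
    loss_mlm = F.cross_entropy(
        s_logits.view(-1, s_logits.size(-1)), labels.view(-1), ignore_index=-100
    )
    # Cosine alignment of hidden states (alpha_cos); works directly here
    # because the student keeps the teacher's hidden size of 1024.
    ones = torch.ones(s_hidden.size(0) * s_hidden.size(1), device=s_hidden.device)
    loss_cos = F.cosine_embedding_loss(
        s_hidden.view(-1, s_hidden.size(-1)),
        t_hidden.view(-1, t_hidden.size(-1)),
        ones,
    )
    return alpha_ce * loss_ce + alpha_mlm * loss_mlm + alpha_cos * loss_cos
```

Note the effective batch size these values give: 16 sequences × 50 gradient-accumulation steps × a world size of 4 GPUs = 3,200 sequences per optimizer update.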
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b22e23014de7252d3fa488bcb494ee2bce4221d27df6f2b87203d0d19b2c7d88
+ size 439862319
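`pytorch_model.bin` is stored as a Git LFS pointer rather than as the weights themselves; the referenced blob is ~440 MB, consistent with roughly 110 million float32 parameters. A small sketch (assuming the blob has been fetched, e.g. with `git lfs pull`) that checks a local copy against the pointer:

```python
import hashlib

# Values from the LFS pointer above.
EXPECTED_OID = "b22e23014de7252d3fa488bcb494ee2bce4221d27df6f2b87203d0d19b2c7d88"
EXPECTED_SIZE = 439862319

sha = hashlib.sha256()
size = 0
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        sha.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size}"
assert sha.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```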