vish88 committed on
Commit 994c4b5
1 Parent(s): 7f6974b

Training in progress, epoch 0

.gitignore ADDED
@@ -0,0 +1 @@
+checkpoint-*/
config.json CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "deberta-base-mrpc-finetuned/checkpoint-36813",
+  "_name_or_path": "CAMeL-Lab/bert-base-arabic-camelbert-msa-sixteenth",
   "architectures": [
-    "BertModel"
+    "BertForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
@@ -30,7 +30,7 @@
   "position_embedding_type": "absolute",
   "problem_type": "single_label_classification",
   "torch_dtype": "float32",
-  "transformers_version": "4.26.0",
+  "transformers_version": "4.26.1",
   "type_vocab_size": 2,
   "use_cache": true,
   "vocab_size": 30000
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:594a8113c349a7d780a37319c8025df102154e1941e4f6e09f89e1623dac35e2
-size 436393773
+oid sha256:93d3755199928d4a18443929d6dac1e066de9d39889912265c7b3dbb867e9762
+size 436407413
runs/Feb15_12-27-28_f079c7bd7594/1676464170.6076372/events.out.tfevents.1676464170.f079c7bd7594.3239.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c83803878135953202cd61a1f88d2d479af9fac62a15682f1e8143d030048c1c
+size 5783
runs/Feb15_12-27-28_f079c7bd7594/events.out.tfevents.1676464170.f079c7bd7594.3239.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6cf62fbae530770dd79f608e42b173f4bb0325898057084a0035cde9fc382dad
+size 8244
tokenizer_config.json CHANGED
@@ -5,7 +5,7 @@
   "full_tokenizer_file": null,
   "mask_token": "[MASK]",
   "model_max_length": 1000000000000000019884624838656,
-  "name_or_path": "deberta-base-mrpc-finetuned/checkpoint-36813",
+  "name_or_path": "CAMeL-Lab/bert-base-arabic-camelbert-msa-sixteenth",
   "never_split": null,
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb431ae514d10ffb1c12f81825be19a36424c781aae1b0734c27e1048b36540a
+size 3579
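
The remaining changes (the checkpoint-*/ ignore rule, the runs/ TensorBoard event files, training_args.bin, and the refreshed pytorch_model.bin pointer) are the usual artifacts of a transformers Trainer run that pushes to the Hub. A rough, self-contained sketch of such a run; the toy dataset, hyperparameters, and output directory are assumptions for illustration, not values recorded in this commit:

```python
from datasets import Dataset
from transformers import (AutoModelForSequenceClassification, AutoTokenizer,
                          Trainer, TrainingArguments)

base = "CAMeL-Lab/bert-base-arabic-camelbert-msa-sixteenth"
tokenizer = AutoTokenizer.from_pretrained(base)
model = AutoModelForSequenceClassification.from_pretrained(base, num_labels=2)

# Tiny placeholder dataset purely for illustration; the real training data
# is not part of this commit.
train_dataset = Dataset.from_dict(
    {"text": ["مثال أول", "مثال ثان"], "label": [0, 1]}
).map(lambda ex: tokenizer(ex["text"], truncation=True,
                           padding="max_length", max_length=32))

# Output directory and hyperparameters are assumed. The Trainer writes
# training_args.bin, runs/<timestamp>/ TensorBoard event files, and
# checkpoint-*/ directories (excluded from the repo via .gitignore)
# under output_dir.
training_args = TrainingArguments(
    output_dir=".",
    num_train_epochs=1,
    per_device_train_batch_size=16,
    report_to="tensorboard",
)

trainer = Trainer(
    model=model,
    args=training_args,
    train_dataset=train_dataset,
    tokenizer=tokenizer,
)
trainer.train()
```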