ravfogs committed
Commit
72f2fa2
1 Parent(s): 6dff79c

Create config.json

Files changed (1)
  1. config.json +68 -0
config.json ADDED
@@ -0,0 +1,68 @@
+ {'return_dict': True,
+ 'output_hidden_states': False,
+ 'output_attentions': False,
+ 'torchscript': False,
+ 'torch_dtype': None,
+ 'use_bfloat16': False,
+ 'tf_legacy_loss': False,
+ 'pruned_heads': {},
+ 'tie_word_embeddings': True,
+ 'is_encoder_decoder': False,
+ 'is_decoder': False,
+ 'cross_attention_hidden_size': None,
+ 'add_cross_attention': False,
+ 'tie_encoder_decoder': False,
+ 'max_length': 20,
+ 'min_length': 0,
+ 'do_sample': False,
+ 'early_stopping': False,
+ 'num_beams': 1,
+ 'num_beam_groups': 1,
+ 'diversity_penalty': 0.0,
+ 'temperature': 1.0,
+ 'top_k': 50,
+ 'top_p': 1.0,
+ 'typical_p': 1.0,
+ 'repetition_penalty': 1.0,
+ 'length_penalty': 1.0,
+ 'no_repeat_ngram_size': 0,
+ 'encoder_no_repeat_ngram_size': 0,
+ 'bad_words_ids': None,
+ 'num_return_sequences': 1,
+ 'chunk_size_feed_forward': 0,
+ 'output_scores': False,
+ 'return_dict_in_generate': False,
+ 'forced_bos_token_id': None,
+ 'forced_eos_token_id': None,
+ 'remove_invalid_values': False,
+ 'exponential_decay_length_penalty': None,
+ 'suppress_tokens': None,
+ 'begin_suppress_tokens': None,
+ 'architectures': ['MPNetForMaskedLM'],
+ 'finetuning_task': None,
+ 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'},
+ 'label2id': {'LABEL_0': 0, 'LABEL_1': 1},
+ 'tokenizer_class': None,
+ 'prefix': None,
+ 'bos_token_id': 0,
+ 'pad_token_id': 1,
+ 'eos_token_id': 2,
+ 'sep_token_id': None,
+ 'decoder_start_token_id': None,
+ 'task_specific_params': None,
+ 'problem_type': None,
+ '_name_or_path': 'microsoft/mpnet-base',
+ 'transformers_version': '4.25.1',
+ 'model_type': 'mpnet',
+ 'vocab_size': 30527,
+ 'hidden_size': 768,
+ 'num_hidden_layers': 12,
+ 'num_attention_heads': 12,
+ 'hidden_act': 'gelu',
+ 'intermediate_size': 3072,
+ 'hidden_dropout_prob': 0.1,
+ 'attention_probs_dropout_prob': 0.1,
+ 'max_position_embeddings': 514,
+ 'initializer_range': 0.02,
+ 'layer_norm_eps': 1e-05,
+ 'relative_attention_num_buckets': 32}