TransQuest committed on
Commit
c877ba8
1 Parent(s): e45be7f

from Google Colab

Browse files
Files changed (3) hide show
  1. config.json +54 -6
  2. model_args.json +1 -0
  3. pooling_config.json +7 -0
config.json CHANGED
@@ -1,7 +1,55 @@
1
  {
2
- "word_embedding_dimension": 1024,
3
- "pooling_mode_cls_token": false,
4
- "pooling_mode_mean_tokens": true,
5
- "pooling_mode_max_tokens": false,
6
- "pooling_mode_mean_sqrt_len_tokens": false
7
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  {
2
+ "_num_labels": 2,
3
+ "architectures": [
4
+ "XLMRobertaModel"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "bad_words_ids": null,
8
+ "bos_token_id": 0,
9
+ "decoder_start_token_id": null,
10
+ "do_sample": false,
11
+ "early_stopping": false,
12
+ "eos_token_id": 2,
13
+ "finetuning_task": null,
14
+ "hidden_act": "gelu",
15
+ "hidden_dropout_prob": 0.1,
16
+ "hidden_size": 1024,
17
+ "id2label": {
18
+ "0": "LABEL_0",
19
+ "1": "LABEL_1"
20
+ },
21
+ "initializer_range": 0.02,
22
+ "intermediate_size": 4096,
23
+ "is_decoder": false,
24
+ "is_encoder_decoder": false,
25
+ "label2id": {
26
+ "LABEL_0": 0,
27
+ "LABEL_1": 1
28
+ },
29
+ "layer_norm_eps": 1e-05,
30
+ "length_penalty": 1.0,
31
+ "max_length": 20,
32
+ "max_position_embeddings": 514,
33
+ "min_length": 0,
34
+ "model_type": "xlm-roberta",
35
+ "no_repeat_ngram_size": 0,
36
+ "num_attention_heads": 16,
37
+ "num_beams": 1,
38
+ "num_hidden_layers": 24,
39
+ "num_return_sequences": 1,
40
+ "output_attentions": false,
41
+ "output_hidden_states": false,
42
+ "output_past": true,
43
+ "pad_token_id": 1,
44
+ "prefix": null,
45
+ "pruned_heads": {},
46
+ "repetition_penalty": 1.0,
47
+ "task_specific_params": null,
48
+ "temperature": 1.0,
49
+ "top_k": 50,
50
+ "top_p": 1.0,
51
+ "torchscript": false,
52
+ "type_vocab_size": 1,
53
+ "use_bfloat16": false,
54
+ "vocab_size": 250002
55
+ }
model_args.json ADDED
@@ -0,0 +1 @@
 
1
+ {"adam_epsilon": 1e-08, "best_model_dir": "temp/outputs/best_model", "cache_dir": "temp/cache_dir/", "config": {}, "cosine_schedule_num_cycles": 0.5, "custom_layer_parameters": [], "custom_parameter_groups": [], "dataloader_num_workers": 0, "do_lower_case": false, "dynamic_quantize": false, "early_stopping_consider_epochs": false, "early_stopping_delta": 0, "early_stopping_metric": "eval_loss", "early_stopping_metric_minimize": true, "early_stopping_patience": 10, "encoding": null, "adafactor_eps": [1e-30, 0.001], "adafactor_clip_threshold": 1.0, "adafactor_decay_rate": -0.8, "adafactor_beta1": null, "adafactor_scale_parameter": true, "adafactor_relative_step": true, "adafactor_warmup_init": true, "eval_batch_size": 8, "evaluate_during_training": true, "evaluate_during_training_silent": true, "evaluate_during_training_steps": 100, "evaluate_during_training_verbose": true, "evaluate_each_epoch": true, "fp16": false, "gradient_accumulation_steps": 1, "learning_rate": 1e-05, "local_rank": -1, "logging_steps": 100, "manual_seed": 777, "max_grad_norm": 1.0, "max_seq_length": 80, "model_name": null, "model_type": null, "multiprocessing_chunksize": 500, "n_gpu": 1, "no_cache": false, "no_save": false, "not_saved_args": [], "num_train_epochs": 6, "optimizer": "AdamW", "output_dir": "temp/outputs/", "overwrite_output_dir": true, "process_count": 1, "polynomial_decay_schedule_lr_end": 1e-07, "polynomial_decay_schedule_power": 1.0, "quantized_model": false, "reprocess_input_data": true, "save_best_model": true, "save_eval_checkpoints": true, "save_model_every_epoch": true, "save_optimizer_and_scheduler": true, "save_recent_only": true, "save_steps": 100, "scheduler": "linear_schedule_with_warmup", "silent": false, "skip_special_tokens": true, "tensorboard_dir": null, "thread_count": null, "train_batch_size": 8, "train_custom_parameters_only": false, "use_cached_eval_features": false, "use_early_stopping": true, "use_multiprocessing": true, "wandb_kwargs": {}, 
"wandb_project": null, "warmup_ratio": 0.06, "warmup_steps": 0, "weight_decay": 0, "model_class": "SiameseTransQuestModel", "labels_list": [], "labels_map": {}, "lazy_delimiter": "\t", "lazy_labels_column": 1, "lazy_loading": false, "lazy_loading_start_line": 1, "lazy_text_a_column": null, "lazy_text_b_column": null, "lazy_text_column": 0, "onnx": false, "regression": true, "sliding_window": false, "special_tokens_list": [], "stride": 0.8, "tie_value": 1}
pooling_config.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
1
+ {
2
+ "word_embedding_dimension": 1024,
3
+ "pooling_mode_cls_token": false,
4
+ "pooling_mode_mean_tokens": true,
5
+ "pooling_mode_max_tokens": false,
6
+ "pooling_mode_mean_sqrt_len_tokens": false
7
+ }