Delete config.json
Browse files — config.json: +0 −32
config.json
DELETED
@@ -1,32 +0,0 @@
-{
-  "_name_or_path": "/fsx-checkpoints/jxm/cde/2024-09-18-supervised-final-bge--epoch-4/checkpoint-1820",
-  "architecture": "transductive",
-  "architectures": [
-    "DatasetTransformer"
-  ],
-  "attn_implementation": null,
-  "auto_map": {
-    "AutoConfig": "misc.ContextualModelConfig",
-    "AutoModel": "model.DatasetTransformer"
-  },
-  "biencoder_pooling_strategy": "mean",
-  "cache_dir": null,
-  "config_name": null,
-  "disable_dropout": true,
-  "disable_transductive_rotary_embedding": true,
-  "embedder": "nomic-ai/nomic-bert-2048",
-  "embedder_rerank": "sentence-transformers/gtr-t5-base",
-  "embedding_output_dim": null,
-  "limit_layers": null,
-  "limit_layers_first_stage": null,
-  "logit_scale": 50.0,
-  "max_seq_length": 512,
-  "model_revision": "main",
-  "tokenizer_name": null,
-  "torch_dtype": "float32",
-  "transductive_corpus_size": 512,
-  "transductive_sequence_dropout_prob": 0.0,
-  "transductive_tie_token_embeddings": false,
-  "transductive_tokens_per_document": 1,
-  "transformers_version": "4.44.2"
-}