{
  "architecture": "transductive",
  "architectures": [
    "BiEncoder"
  ],
  "biencoder_pooling_strategy": "mean",
  "cache_dir": null,
  "config_name": null,
  "disable_dropout": true,
  "disable_transductive_rotary_embedding": true,
  "embedder": "nomic-ai/nomic-bert-2048",
  "embedder_rerank": "sentence-transformers/gtr-t5-base",
  "embedding_output_dim": null,
  "limit_layers": null,
  "limit_layers_first_stage": null,
  "logit_scale": 50.0,
  "max_seq_length": 512,
  "model_revision": "main",
  "tokenizer_name": null,
  "torch_dtype": "float32",
  "transductive_corpus_size": 512,
  "transductive_sequence_dropout_prob": 0.0,
  "transductive_tie_token_embeddings": false,
  "transductive_tokens_per_document": 1,
  "transformers_version": "4.44.2"
}
|
|
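Because "transductive"/"BiEncoder" is a custom architecture rather than a stock `transformers` class, a quick way to sanity-check these settings is to read the file directly. Below is a minimal sketch, assuming the file above is saved locally as `config.json`; the interpretations in the comments are inferred from the key names and values shown here, not from separate documentation.

```python
import json

# Load the model configuration shown above from the local working directory.
with open("config.json") as f:
    cfg = json.load(f)

# Backbone encoders referenced by the config (Hugging Face model IDs).
print(cfg["embedder"])         # nomic-ai/nomic-bert-2048
print(cfg["embedder_rerank"])  # sentence-transformers/gtr-t5-base

# Settings that typically matter when preparing inputs.
print(cfg["max_seq_length"])                    # 512
print(cfg["transductive_corpus_size"])          # 512
print(cfg["transductive_tokens_per_document"])  # 1
print(cfg["torch_dtype"])                       # float32
```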