[ { "name":"SGPT-125M-Search", "model":"Muennighoff/SGPT-125M-weightedmean-msmarco-specb-bitfit", "fork_url":"https://github.com/taskswithcode/sgpt", "orig_author_url":"https://github.com/Muennighoff", "orig_author":"Niklas Muennighoff", "sota_info": { "task":"#1 in multiple information retrieval & search tasks(smaller variant)", "sota_link":"https://paperswithcode.com/paper/sgpt-gpt-sentence-embeddings-for-semantic" }, "paper_url":"https://arxiv.org/abs/2202.08904v5", "mark":"True", "class":"SGPTQnAModel"}, { "name":"GPT-Neo-125M", "model":"EleutherAI/gpt-neo-125M", "fork_url":"https://github.com/taskswithcode/sgpt", "orig_author_url":"https://www.eleuther.ai/", "orig_author":"EleuthorAI", "sota_info": { "task":"Top 20 in multiple NLP tasks (smaller variant)", "sota_link":"https://paperswithcode.com/paper/gpt-neox-20b-an-open-source-autoregressive-1" }, "paper_url":"https://zenodo.org/record/5551208#.YyV0k-zMLX0", "mark":"True", "class":"CausalLMModel"}, { "name":"sentence-transformers/all-MiniLM-L6-v2", "model":"sentence-transformers/all-MiniLM-L6-v2", "fork_url":"https://github.com/taskswithcode/sentence_similarity_hf_model", "orig_author_url":"https://github.com/UKPLab", "orig_author":"Ubiquitous Knowledge Processing Lab", "sota_info": { "task":"Over 3.8 million downloads from huggingface", "sota_link":"https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2" }, "paper_url":"https://arxiv.org/abs/1908.10084", "mark":"True", "class":"HFModel"}, { "name":"sentence-transformers/paraphrase-MiniLM-L6-v2", "model":"sentence-transformers/paraphrase-MiniLM-L6-v2", "fork_url":"https://github.com/taskswithcode/sentence_similarity_hf_model", "orig_author_url":"https://github.com/UKPLab", "orig_author":"Ubiquitous Knowledge Processing Lab", "sota_info": { "task":"Over 2 million downloads from huggingface", "sota_link":"https://huggingface.co/sentence-transformers/paraphrase-MiniLM-L6-v2" }, "paper_url":"https://arxiv.org/abs/1908.10084", "mark":"True", "class":"HFModel"}, { "name":"sentence-transformers/bert-base-nli-mean-tokens", "model":"sentence-transformers/bert-base-nli-mean-tokens", "fork_url":"https://github.com/taskswithcode/sentence_similarity_hf_model", "orig_author_url":"https://github.com/UKPLab", "orig_author":"Ubiquitous Knowledge Processing Lab", "sota_info": { "task":"Over 700,000 downloads from huggingface", "sota_link":"https://huggingface.co/sentence-transformers/bert-base-nli-mean-tokens" }, "paper_url":"https://arxiv.org/abs/1908.10084", "mark":"True", "class":"HFModel"}, { "name":"sentence-transformers/all-mpnet-base-v2", "model":"sentence-transformers/all-mpnet-base-v2", "fork_url":"https://github.com/taskswithcode/sentence_similarity_hf_model", "orig_author_url":"https://github.com/UKPLab", "orig_author":"Ubiquitous Knowledge Processing Lab", "sota_info": { "task":"Over 500,000 downloads from huggingface", "sota_link":"https://huggingface.co/sentence-transformers/all-mpnet-base-v2" }, "paper_url":"https://arxiv.org/abs/1908.10084", "mark":"True", "class":"HFModel"}, { "name":"sentence-transformers/all-MiniLM-L12-v2", "model":"sentence-transformers/all-MiniLM-L12-v2", "fork_url":"https://github.com/taskswithcode/sentence_similarity_hf_model", "orig_author_url":"https://github.com/UKPLab", "orig_author":"Ubiquitous Knowledge Processing Lab", "sota_info": { "task":"Over 500,000 downloads from huggingface", "sota_link":"https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2" }, "paper_url":"https://arxiv.org/abs/1908.10084", "mark":"True", "class":"HFModel"}, { 
"name":"SGPT-125M", "model":"Muennighoff/SGPT-125M-weightedmean-nli-bitfit", "fork_url":"https://github.com/taskswithcode/sgpt", "orig_author_url":"https://github.com/Muennighoff", "orig_author":"Niklas Muennighoff", "sota_info": { "task":"#1 in multiple information retrieval & search tasks(smaller variant)", "sota_link":"https://paperswithcode.com/paper/sgpt-gpt-sentence-embeddings-for-semantic" }, "paper_url":"https://arxiv.org/abs/2202.08904v5", "mark":"True", "class":"SGPTModel"}, { "name":"SIMCSE-base" , "model":"princeton-nlp/sup-simcse-roberta-base", "fork_url":"https://github.com/taskswithcode/SimCSE", "orig_author_url":"https://github.com/princeton-nlp", "orig_author":"Princeton Natural Language Processing", "sota_info": { "task":"Within top 10 in multiple semantic textual similarity tasks(smaller variant)", "sota_link":"https://paperswithcode.com/paper/simcse-simple-contrastive-learning-of" }, "paper_url":"https://arxiv.org/abs/2104.08821v4", "mark":"True", "class":"SimCSEModel","sota_link":"https://paperswithcode.com/sota/semantic-textual-similarity-on-sick"} ]