louis030195 committed on
Commit
dea3735
1 Parent(s): 9c9744b

Add new SentenceTransformer model.

Browse files
Files changed (4) hide show
  1. .gitattributes +1 -0
  2. config.json +1 -1
  3. pytorch_model.bin +1 -1
  4. tokenizer_config.json +1 -1
.gitattributes CHANGED
@@ -33,3 +33,4 @@ pytorch_model.bin filter=lfs diff=lfs merge=lfs -text
33
  .git/lfs/objects/19/d2/19d20b91035747f71fafa7e71925972c330c16e9d88537b998c04de224f6a582 filter=lfs diff=lfs merge=lfs -text
34
  .git/lfs/objects/08/f6/08f6b90da60e0ad987449607196b699700aa45e545d9ac46b63bcfe5565779c5 filter=lfs diff=lfs merge=lfs -text
35
  .git/lfs/objects/5b/6f/5b6f04f3622b81b47a1d1f77ad9bed8fe96311a5deb06643cf8bd3ec54ad9833 filter=lfs diff=lfs merge=lfs -text
 
 
33
  .git/lfs/objects/19/d2/19d20b91035747f71fafa7e71925972c330c16e9d88537b998c04de224f6a582 filter=lfs diff=lfs merge=lfs -text
34
  .git/lfs/objects/08/f6/08f6b90da60e0ad987449607196b699700aa45e545d9ac46b63bcfe5565779c5 filter=lfs diff=lfs merge=lfs -text
35
  .git/lfs/objects/5b/6f/5b6f04f3622b81b47a1d1f77ad9bed8fe96311a5deb06643cf8bd3ec54ad9833 filter=lfs diff=lfs merge=lfs -text
36
+ .git/lfs/objects/be/d2/bed27936f175f49ec85e499822005771c12a5441a573dc172c11861c892de77b filter=lfs diff=lfs merge=lfs -text
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "./output/multi-qa-MiniLM-L6-cos-v1-obsidian",
3
  "architectures": [
4
  "BertModel"
5
  ],
 
1
  {
2
+ "_name_or_path": "sentence-transformers/multi-qa-MiniLM-L6-cos-v1",
3
  "architectures": [
4
  "BertModel"
5
  ],
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:bed27936f175f49ec85e499822005771c12a5441a573dc172c11861c892de77b
3
  size 90888945
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:86d28f109ababf090128daa3500aa1c27a8ca840f696b66e98eb8fbb237faa56
3
  size 90888945
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "special_tokens_map_file": null, "name_or_path": "./output/multi-qa-MiniLM-L6-cos-v1-obsidian", "do_basic_tokenize": true, "never_split": null, "model_max_length": 512, "tokenizer_class": "BertTokenizer"}
 
1
+ {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "special_tokens_map_file": null, "name_or_path": "sentence-transformers/multi-qa-MiniLM-L6-cos-v1", "do_basic_tokenize": true, "never_split": null, "model_max_length": 512, "tokenizer_class": "BertTokenizer"}