bwang0911 committed
Commit 8f1b0a2
Parent: b791d38

Update README.md

Files changed (1):
  1. README.md +3 -2
README.md CHANGED
@@ -1130,7 +1130,7 @@ def mean_pooling(model_output, attention_mask):
 sentences = ['How is the weather today?', '今天天气怎么样?']
 
 tokenizer = AutoTokenizer.from_pretrained('jinaai/jina-embeddings-v2-base-zh')
-model = AutoModel.from_pretrained('jinaai/jina-embeddings-v2-base-zh', trust_remote_code=True)
+model = AutoModel.from_pretrained('jinaai/jina-embeddings-v2-base-zh', trust_remote_code=True, torch_dtype=torch.bfloat16)
 
 encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
 
@@ -1148,11 +1148,12 @@ You can use Jina Embedding models directly from transformers package.
 
 ```python
 !pip install transformers
+import torch
 from transformers import AutoModel
 from numpy.linalg import norm
 
 cos_sim = lambda a,b: (a @ b.T) / (norm(a)*norm(b))
-model = AutoModel.from_pretrained('jinaai/jina-embeddings-v2-base-zh', trust_remote_code=True) # trust_remote_code is needed to use the encode method
+model = AutoModel.from_pretrained('jinaai/jina-embeddings-v2-base-zh', trust_remote_code=True, torch_dtype=torch.bfloat16)
 embeddings = model.encode(['How is the weather today?', '今天天气怎么样?'])
 print(cos_sim(embeddings[0], embeddings[1]))
 ```
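For reference, after this commit the transformers quick-start snippet in the README reads roughly as follows. This is a minimal sketch assembled from the second hunk above; the `!pip install transformers` line is shown as a comment here because it is a notebook command rather than Python.

```python
# Install the dependency first (in a notebook cell: `!pip install transformers`).
import torch
from transformers import AutoModel
from numpy.linalg import norm

# Cosine similarity between two embedding vectors.
cos_sim = lambda a, b: (a @ b.T) / (norm(a) * norm(b))

# trust_remote_code=True is needed because the encode method is defined in the
# model's remote code; torch_dtype=torch.bfloat16 loads the weights in bfloat16,
# roughly halving memory use compared to float32.
model = AutoModel.from_pretrained(
    'jinaai/jina-embeddings-v2-base-zh',
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,
)

embeddings = model.encode(['How is the weather today?', '今天天气怎么样?'])
print(cos_sim(embeddings[0], embeddings[1]))
```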