Update README.md
README.md
@@ -129,7 +129,7 @@ The input to the model is called `logits`, and there is one output per label. Ea
 # E.g. produced from sentence-transformers E.g. huggingface.co/BAAI/bge-small-en-v1.5
 # or from an ONNX version E.g. huggingface.co/Xenova/bge-small-en-v1.5
 
-print(
+print(embeddings.shape) # E.g. a batch of 1 sentence
 > (1, 384)
 
 import onnxruntime as ort
@@ -137,7 +137,7 @@ import onnxruntime as ort
 
 sess = ort.InferenceSession("path_to_model_dot_onnx", providers=['CPUExecutionProvider'])
 
 outputs = [o.name for o in sess.get_outputs()] # list of labels, in the order of the outputs
-preds_onnx = sess.run(_outputs, {'logits':
+preds_onnx = sess.run(_outputs, {'logits': embeddings})
 # preds_onnx is a list with 28 entries, one per label,
 # each with a numpy array of shape (1, 2) given the input was a batch of 1
 
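For context, a self-contained version of the snippet this change completes might look like the sketch below. It assumes the embedding comes from `BAAI/bge-small-en-v1.5` via `sentence-transformers` (as the README comments suggest), that `"path_to_model_dot_onnx"` is a placeholder for the actual classifier file, and that the `_outputs` passed to `sess.run` refers to the `outputs` list built on the line above it.

```python
# Minimal sketch, not the repository's exact code: embed one sentence and run
# it through the per-label ONNX classifier described in the README.
from sentence_transformers import SentenceTransformer  # assumed source of the embeddings
import onnxruntime as ort

# 384-dimensional embeddings, e.g. from BAAI/bge-small-en-v1.5
encoder = SentenceTransformer("BAAI/bge-small-en-v1.5")
embeddings = encoder.encode(["an example sentence"])   # numpy array, shape (1, 384)
print(embeddings.shape)
# (1, 384)  -- a batch of 1 sentence

# The classifier's single input is named 'logits'; there is one output per label.
sess = ort.InferenceSession("path_to_model_dot_onnx",  # placeholder path from the README
                            providers=["CPUExecutionProvider"])
outputs = [o.name for o in sess.get_outputs()]         # label names, in output order

# Assumes the snippet's `_outputs` is this `outputs` list.
preds_onnx = sess.run(outputs, {"logits": embeddings})
# preds_onnx is a list with one entry per label (28 for this model), each a
# numpy array of shape (1, 2) given the input batch of 1.
```

The sketch keeps the README's placeholder model path; swap in the real `.onnx` file before running it.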