izhx, Xenova (HF staff) committed on
Commit bb3242d
1 Parent(s): 42499ad

Add transformers.js example code (#6)


- Add transformers.js example code (f91abb96989460f391cea54e86c52049dd5599ff)


Co-authored-by: Joshua <Xenova@users.noreply.huggingface.co>

Files changed (1)
  1. README.md +27 -0
README.md CHANGED
@@ -6,6 +6,7 @@ tags:
  - sentence-transformers
  - gte
  - mteb
+ - transformers.js
  license: apache-2.0
  language:
  - en
@@ -2686,6 +2687,32 @@ embeddings = model.encode(sentences)
  print(cos_sim(embeddings[0], embeddings[1]))
  ```

+ Use with `transformers.js`:
+
+ ```js
+ // npm i @xenova/transformers
+ import { pipeline, dot } from '@xenova/transformers';
+
+ // Create feature extraction pipeline
+ const extractor = await pipeline('feature-extraction', 'Alibaba-NLP/gte-large-en-v1.5', {
+     quantized: false, // Comment out this line to use the quantized version
+ });
+
+ // Generate sentence embeddings
+ const sentences = [
+     "what is the capital of China?",
+     "how to implement quick sort in python?",
+     "Beijing",
+     "sorting algorithms"
+ ]
+ const output = await extractor(sentences, { normalize: true, pooling: 'cls' });
+
+ // Compute similarity scores
+ const [source_embeddings, ...document_embeddings] = output.tolist();
+ const similarities = document_embeddings.map(x => 100 * dot(source_embeddings, x));
+ console.log(similarities); // [41.86354093370361, 77.07076371259589, 37.02981979677899]
+ ```
+
  ## Training Details

  ### Training Data
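
Since the snippet requests normalized CLS embeddings (`pooling: 'cls'`, `normalize: true`), the dot products it prints are also cosine similarities. As a minimal sketch (not part of this commit; variable names are illustrative), the same scores could be reproduced with the `cos_sim` helper that `@xenova/transformers` also exports, reusing the `output` tensor from the example above:

```js
import { cos_sim } from '@xenova/transformers';

// Reuse `output` from the committed example above (normalized CLS embeddings).
const [source, ...documents] = output.tolist();

// For unit-length vectors, cosine similarity equals the dot product,
// so these scores should match the ones printed by the example.
const cosineScores = documents.map(d => 100 * cos_sim(source, d));
console.log(cosineScores);
```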