Abdul-Ib committed on
Commit 6456b5c
1 Parent(s): 4afd3d6

Update helper_functions.py

Files changed (1)
  1. helper_functions.py +31 -8
helper_functions.py CHANGED
@@ -4,21 +4,44 @@ import numpy as np
 import nest_asyncio
 import fasttext
 import torch
+
 nest_asyncio.apply()
 from typing import List
 from rank_bm25 import BM25L
 from normalizer import Normalizer
 from fastapi import HTTPException
-from sentence_transformers import SentenceTransformer, util
-
+from optimum.onnxruntime import ORTModelForFeatureExtraction
+from sentenceTranformer import SentenceEmbeddingPipeline
+from transformers import AutoTokenizer
+from main import logger
+
+# Initialize
+# model_path = "Abdul-Ib/all-MiniLM-L6-v2-2024"
+# semantic_model = SentenceTransformer(model_path, cache_folder="./assets")
+
+try:
+    # Load the semantic model
+    tokenizer = AutoTokenizer.from_pretrained("./assets/onnx")
+    model = ORTModelForFeatureExtraction.from_pretrained(
+        "./assets/onnx", file_name="model_quantized.onnx"
+    )
+    semantic_model = SentenceEmbeddingPipeline(model=model, tokenizer=tokenizer)
+except Exception as e:
+    raise HTTPException(
+        status_code=500,
+        detail=f"An error occurred during semantic model loading: {e}",
+    )
 
 # Initialization
-normalizer = Normalizer()
-model_path = "Abdul-Ib/all-MiniLM-L6-v2-2024"
-semantic_model = SentenceTransformer(model_path, cache_folder="./assets")
-categorizer = fasttext.load_model("./assets/categorization_pipeline.ftz")
-
-category_map = np.load("./assets/category_map.npy", allow_pickle=True).item()
+try:
+    normalizer = Normalizer()
+    categorizer = fasttext.load_model("./assets/categorization_pipeline.ftz")
+    category_map = np.load("./assets/category_map.npy", allow_pickle=True).item()
+except Exception as e:
+    raise HTTPException(
+        status_code=500,
+        detail=f"An error occurred during initialization of categorizer and normalizer: {e}",
+    )
 
 
 def make_request(url: str) -> dict:
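
A note on the new pipeline class, since its definition is not part of this diff: SentenceEmbeddingPipeline is imported from the repo's own sentenceTranformer module and is presumably built on the pattern commonly used with optimum.onnxruntime feature-extraction models (tokenize, run the quantized ONNX model, mean-pool the token embeddings, L2-normalize). The sketch below follows that pattern; the mean_pooling helper, the pooling and normalization choices, and the calling convention are illustrative assumptions, not the repo's actual implementation.

import torch
import torch.nn.functional as F
from transformers import Pipeline

def mean_pooling(model_output, attention_mask):
    # Average token embeddings, ignoring padding positions
    token_embeddings = model_output[0]
    mask = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
    return torch.sum(token_embeddings * mask, 1) / torch.clamp(mask.sum(1), min=1e-9)

class SentenceEmbeddingPipeline(Pipeline):
    # Hypothetical sketch of a sentence-embedding pipeline over an ORT feature-extraction model
    def _sanitize_parameters(self, **kwargs):
        return {}, {}, {}

    def preprocess(self, inputs):
        # Tokenize the incoming text (str or list of str)
        return self.tokenizer(inputs, padding=True, truncation=True, return_tensors="pt")

    def _forward(self, model_inputs):
        outputs = self.model(**model_inputs)
        return {"outputs": outputs, "attention_mask": model_inputs["attention_mask"]}

    def postprocess(self, model_outputs):
        # Pool and L2-normalize so embeddings can be compared with cosine similarity
        embeddings = mean_pooling(model_outputs["outputs"], model_outputs["attention_mask"])
        return F.normalize(embeddings, p=2, dim=1)

Under those assumptions, calling semantic_model("some query text") would return a normalized embedding tensor, which downstream ranking code (for example, alongside the BM25L scores) could compare via cosine similarity.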