Pclanglais committed on
Commit b2c6adc
1 Parent(s): 6e2babb

Update app.py

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -27,7 +27,7 @@ embeddings_text = embeddings_data["text_with_context"].tolist()
 
 #Importing the classifier/router (deberta)
 classifier_model = AutoModelForSequenceClassification.from_pretrained("AgentPublic/chatrag-deberta")
-tokenizer = AutoTokenizer.from_pretrained("AgentPublic/chatrag-deberta")
+classifier_tokenizer = AutoTokenizer.from_pretrained("AgentPublic/chatrag-deberta")
 
 #Importing the actual generative LLM (llama-based)
 model_name = "Pclanglais/Tchap"
@@ -40,7 +40,7 @@ system_prompt = "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n
 #Function to guess whether we use the RAG or not.
 def classification_chatrag(query):
     print(query)
-    encoding = tokenizer(query, return_tensors="pt")
+    encoding = classifier_tokenizer(query, return_tensors="pt")
     encoding = {k: v.to(classifier_model.device) for k,v in encoding.items()}
 
     outputs = classifier_model(**encoding)
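
For context, a minimal sketch of what the renamed classifier tokenizer looks like in isolation: the DeBERTa router keeps its own classifier_tokenizer so it cannot clash with the generative LLM's tokenizer, and its output can be reduced to a yes/no RAG decision. The softmax-and-threshold rule and the class-index choice below are assumptions for illustration, not code from this commit.

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Classifier/router (DeBERTa) loaded with its own, separately named tokenizer,
# as introduced by this commit.
classifier_model = AutoModelForSequenceClassification.from_pretrained("AgentPublic/chatrag-deberta")
classifier_tokenizer = AutoTokenizer.from_pretrained("AgentPublic/chatrag-deberta")

def classification_chatrag(query):
    # Tokenize with the classifier's own tokenizer rather than the LLM's.
    encoding = classifier_tokenizer(query, return_tensors="pt")
    encoding = {k: v.to(classifier_model.device) for k, v in encoding.items()}

    # Run the classifier without tracking gradients.
    with torch.no_grad():
        outputs = classifier_model(**encoding)

    # Hypothetical decision rule (not from the commit): treat the second class
    # as "retrieval needed" and route to RAG above a 0.5 probability threshold.
    probs = torch.softmax(outputs.logits, dim=-1)
    return bool(probs[0, 1] > 0.5)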