Changed the LLM to Phi3
rag.py CHANGED
@@ -8,7 +8,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
 class RAG:
 
     def __init__(self):
-        self.model_id = "
+        self.model_id = "microsoft/Phi-3-mini-128k-instruct"
         self.device = "cuda" if torch.cuda.is_available() else "cpu"
 
         self.embedding_model_name = "all-mpnet-base-v2"
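
For context, below is a minimal sketch of how the fields set in this __init__ might be used downstream; the rest of rag.py is not shown in the diff, so the attribute names (tokenizer, model, embedder), the trust_remote_code flag, and the use of sentence-transformers for "all-mpnet-base-v2" are assumptions, not the repo's actual code.

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from sentence_transformers import SentenceTransformer  # assumed backend for "all-mpnet-base-v2"


class RAG:
    def __init__(self):
        self.model_id = "microsoft/Phi-3-mini-128k-instruct"
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        self.embedding_model_name = "all-mpnet-base-v2"

        # Hypothetical loading of the generator LLM. Early Phi-3 checkpoints shipped
        # custom modeling code, so trust_remote_code=True may be required depending
        # on the installed transformers version.
        self.tokenizer = AutoTokenizer.from_pretrained(self.model_id)
        self.model = AutoModelForCausalLM.from_pretrained(
            self.model_id,
            torch_dtype=torch.float16 if self.device == "cuda" else torch.float32,
            trust_remote_code=True,
        ).to(self.device)

        # Hypothetical retrieval embedder built from embedding_model_name.
        self.embedder = SentenceTransformer(self.embedding_model_name, device=self.device)

Since only the model_id string changes in this commit, the prompting and generation code elsewhere in the class presumably keeps working as long as it goes through the tokenizer's chat template rather than a model-specific prompt format.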