mery22 committed on
Commit
cff9147
1 Parent(s): 26a036e

Update app.py

Files changed (1)
  1. app.py +8 -7
app.py CHANGED
@@ -18,13 +18,14 @@ from langchain.chains import LLMChain
 import transformers
 
 import transformers
-model_name='mistralai/Mistral-7B-Instruct-v0.1'
-from huggingface_hub import login
-model_config = transformers.AutoConfig.from_pretrained(
-model_name,
-)
+model_name=# Use a pipeline as a high-level helper
+from transformers import pipeline
+
+# Load model directly
+from transformers import AutoTokenizer, AutoModelForCausalLM
+
+tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
 
-tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
 tokenizer.pad_token = tokenizer.eos_token
 tokenizer.padding_side = "right"
 
@@ -61,7 +62,7 @@ bnb_config = BitsAndBytesConfig(
 # Load pre-trained config
 #################################################################
 model = AutoModelForCausalLM.from_pretrained(
-model_name,
+"mistralai/Mistral-7B-Instruct-v0.1",
 quantization_config=bnb_config,
 )
 # Connect query to FAISS index using a retriever
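
After this commit, app.py loads the Mistral-7B-Instruct-v0.1 tokenizer and the quantized model directly by repo id instead of through the model_name variable. Below is a minimal, self-contained sketch of that loading path, assuming a standard 4-bit BitsAndBytesConfig; the real bnb_config values sit in an unchanged part of app.py and are not visible in this diff, so they are placeholders here.

# Sketch of the loading pattern this commit points at.
# Assumption: a 4-bit bnb_config; app.py's actual settings may differ.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig

model_name = "mistralai/Mistral-7B-Instruct-v0.1"

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,               # assumed; not shown in the diff
    bnb_4bit_quant_type="nf4",       # assumed
    bnb_4bit_compute_dtype=torch.float16,  # assumed
)

tokenizer = AutoTokenizer.from_pretrained(model_name)
tokenizer.pad_token = tokenizer.eos_token  # Mistral ships no pad token by default
tokenizer.padding_side = "right"

model = AutoModelForCausalLM.from_pretrained(
    model_name,
    quantization_config=bnb_config,
)

Judging from the context lines, this model/tokenizer pair then feeds the LangChain LLMChain and the FAISS retriever referenced elsewhere in app.py.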