AjithBharadwaj committed
Commit 6817ac7
1 Parent(s): 7643cf4

Update main.py

Files changed (1)
  1. main.py +5 -5
main.py CHANGED
@@ -1,13 +1,13 @@
 from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline
 from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline,BitsAndBytesConfig
-import accelerate
-import bitsandbytes
+# import accelerate
+# import bitsandbytes
 from langchain_core.prompts import PromptTemplate
 
-quants = BitsAndBytesConfig(load_in_4bit=True)
+# quants = BitsAndBytesConfig(load_in_4bit=True)
 model_id = "mistralai/Mistral-7B-Instruct-v0.2"
-tokenizer = AutoTokenizer.from_pretrained(model_id,quantization_config=quants)
-model = AutoModelForCausalLM.from_pretrained(model_id,quantization_config=quants)
+tokenizer = AutoTokenizer.from_pretrained(model_id,load_in_8bit=True)
+model = AutoModelForCausalLM.from_pretrained(model_id,load_in_8bit=True)
 pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
 hf = HuggingFacePipeline(pipeline=pipe)
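Note on the change: the commit replaces the 4-bit `BitsAndBytesConfig` with bare `load_in_8bit=True` arguments and passes the quantization flag to both the tokenizer and the model. Quantization settings apply to model weights only; `AutoTokenizer.from_pretrained` does not use them. For reference, a minimal working variant of this setup might look like the sketch below. It assumes a CUDA GPU with `bitsandbytes` and `accelerate` installed; `device_map="auto"`, `max_new_tokens`, and the prompt/chain usage at the end are illustrative additions, not part of the commit.

```python
# Minimal sketch (not the committed code), assuming a CUDA GPU with
# `bitsandbytes` and `accelerate` installed.
from langchain_community.llms.huggingface_pipeline import HuggingFacePipeline
from langchain_core.prompts import PromptTemplate
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig, pipeline

model_id = "mistralai/Mistral-7B-Instruct-v0.2"

# Quantize the model weights to 4 bits; the tokenizer needs no quantization settings.
quants = BitsAndBytesConfig(load_in_4bit=True)

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    quantization_config=quants,
    device_map="auto",  # illustrative; lets accelerate place the quantized weights
)

pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=256)
hf = HuggingFacePipeline(pipeline=pipe)

# Illustrative usage via a LangChain prompt; the prompt text here is hypothetical.
prompt = PromptTemplate.from_template("Answer briefly: {question}")
chain = prompt | hf
print(chain.invoke({"question": "What does 4-bit quantization change?"}))
```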