hugo1234 committed on
Commit
026f136
1 Parent(s): 62b7b49

Update utils.py

Browse files
Files changed (1) hide show
  1. utils.py +5 -5
utils.py CHANGED
@@ -78,14 +78,14 @@ def generate_prompt_with_history(text, history, tokenizer, max_length=2048):
78
  #model = AutoModelForCausalLM.from_pretrained("dbmdz/bert-base-italian-cased")
79
  #tokenizer = AutoTokenizer.from_pretrained("asi/gpt-fr-cased-small")
80
  #model = AutoModelForCausalLM.from_pretrained("asi/gpt-fr-cased-small")
81
- #tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-1.3B")
82
- #model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-1.3B")
83
- tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B")
84
- model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-2.7B")
85
 
86
 
87
  def load_tokenizer_and_model(base_model,load_8bit=False):
88
- base_model = "EleutherAI/gpt-neo-2.7B"
89
  if torch.cuda.is_available():
90
  device = "cuda"
91
  else:
 
78
  #model = AutoModelForCausalLM.from_pretrained("dbmdz/bert-base-italian-cased")
79
  #tokenizer = AutoTokenizer.from_pretrained("asi/gpt-fr-cased-small")
80
  #model = AutoModelForCausalLM.from_pretrained("asi/gpt-fr-cased-small")
81
+ #tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-2.7B")
82
+ #model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-2.7B")
83
+ tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-1.3B")
84
+ model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-1.3B")
85
 
86
 
87
  def load_tokenizer_and_model(base_model,load_8bit=False):
88
+ base_model = "EleutherAI/gpt-neo-1.3B"
89
  if torch.cuda.is_available():
90
  device = "cuda"
91
  else: