autonomous019 committed on
Commit
4eabcab
1 Parent(s): 11cb1d3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -4
app.py CHANGED
@@ -20,8 +20,8 @@ from transformers import (
20
  # https://github.com/NielsRogge/Transformers-Tutorials/blob/master/HuggingFace_vision_ecosystem_overview_(June_2022).ipynb
21
  # option 1: load with randomly initialized weights (train from scratch)
22
 
23
- tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
24
- model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")
25
 
26
 
27
  config = ViTConfig(num_hidden_layers=12, hidden_size=768)
@@ -43,8 +43,9 @@ def create_story(text_seed):
43
  #tokenizer = AutoTokenizer.from_pretrained("gpt2")
44
  #model = AutoModelForCausalLM.from_pretrained("gpt2")
45
 
46
- tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
47
- model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")
 
48
 
49
  # set pad_token_id to eos_token_id because GPT2 does not have an EOS token
50
  model.config.pad_token_id = model.config.eos_token_id
 
20
  # https://github.com/NielsRogge/Transformers-Tutorials/blob/master/HuggingFace_vision_ecosystem_overview_(June_2022).ipynb
21
  # option 1: load with randomly initialized weights (train from scratch)
22
 
23
+ #tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-j-6B")
24
+ #model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-j-6B")
25
 
26
 
27
  config = ViTConfig(num_hidden_layers=12, hidden_size=768)
 
43
  #tokenizer = AutoTokenizer.from_pretrained("gpt2")
44
  #model = AutoModelForCausalLM.from_pretrained("gpt2")
45
 
46
+ # EleutherAI GPT-Neo (open GPT-3-style model)
47
+ tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-125M")
48
+ model = AutoModelForCausalLM.from_pretrained("EleutherAI/gpt-neo-125M")
49
 
50
  # set pad_token_id to eos_token_id because GPT2 does not have an EOS token
51
  model.config.pad_token_id = model.config.eos_token_id