maxmyn committed on
Commit
eed6bca
1 Parent(s): 1af738c

Upload config

Browse files
Files changed (1) hide show
  1. config.json +1 -2
config.json CHANGED
@@ -1,5 +1,4 @@
1
  {
2
- "_name_or_path": "mrm8488/gpt-neo-1.3B-8bit",
3
  "activation_function": "gelu_new",
4
  "architectures": [
5
  "GPTNeoForCausalLM"
@@ -53,6 +52,7 @@
53
  "model_type": "gpt_neo",
54
  "num_heads": 16,
55
  "num_layers": 24,
 
56
  "resid_dropout": 0,
57
  "summary_activation": null,
58
  "summary_first_dropout": 0.1,
@@ -67,7 +67,6 @@
67
  }
68
  },
69
  "tokenizer_class": "GPT2Tokenizer",
70
- "torch_dtype": "float32",
71
  "transformers_version": "4.36.0",
72
  "use_cache": true,
73
  "vocab_size": 50257,
 
1
  {
 
2
  "activation_function": "gelu_new",
3
  "architectures": [
4
  "GPTNeoForCausalLM"
 
52
  "model_type": "gpt_neo",
53
  "num_heads": 16,
54
  "num_layers": 24,
55
+ "pad_token_id": 50256,
56
  "resid_dropout": 0,
57
  "summary_activation": null,
58
  "summary_first_dropout": 0.1,
 
67
  }
68
  },
69
  "tokenizer_class": "GPT2Tokenizer",
 
70
  "transformers_version": "4.36.0",
71
  "use_cache": true,
72
  "vocab_size": 50257,