joshcarp committed on
Commit
e1d183a
1 Parent(s): 6843264

Training in progress, epoch 1

Files changed (3):
  1. config.json +3 -11
  2. model.safetensors +2 -2
  3. tokenizer.json +6 -1
config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "_name_or_path": "gpt2",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
@@ -11,11 +10,10 @@
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
-  "n_ctx": 1024,
-  "n_embd": 768,
-  "n_head": 12,
+  "n_embd": 96,
+  "n_head": 8,
   "n_inner": null,
-  "n_layer": 12,
+  "n_layer": 4,
   "n_positions": 1024,
   "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
@@ -26,12 +24,6 @@
   "summary_proj_to_labels": true,
   "summary_type": "cls_index",
   "summary_use_proj": true,
-  "task_specific_params": {
-    "text-generation": {
-      "do_sample": true,
-      "max_length": 50
-    }
-  },
   "torch_dtype": "float32",
   "transformers_version": "4.40.1",
   "use_cache": true,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b87d089352db00697642036a3fdfa154ff2227f1966d2d379dd4ca9a4fb4523e
-size 497774208
+oid sha256:f19a53748ef7829c453fd4887a282f741186e3ba52abc703e6a2e2e651fd108a
+size 21487080
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 512,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
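
The tokenizer change replaces "truncation": null with a right-side, 512-token truncation policy. One way such a block ends up in tokenizer.json is to enable truncation through the tokenizers library and re-save the file; a sketch under that assumption (the file path is illustrative):

# Minimal sketch (assumed workflow, not necessarily how this commit was produced):
# enable truncation on a saved tokenizer so it serializes the block shown above.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
tok.enable_truncation(
    max_length=512,            # serialized as "max_length": 512
    stride=0,                  # "stride": 0
    strategy="longest_first",  # "LongestFirst"
    direction="right",         # "Right"
)
tok.save("tokenizer.json")

The limit only affects encoding: sequences are cut to 512 tokens at encode time, while the model's n_positions stays at 1024.
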