tingtone committed on
Commit 1e6cb1e
1 Parent(s): 258fb55

Training in progress, epoch 1

config.json CHANGED
@@ -1,6 +1,5 @@
 {
-  "_name_or_path": "distilgpt2",
-  "_num_labels": 1,
+  "_name_or_path": "gpt2",
   "activation_function": "gelu_new",
   "adapters": {
     "adapters": {},
@@ -38,7 +37,7 @@
   "n_embd": 768,
   "n_head": 12,
   "n_inner": null,
-  "n_layer": 6,
+  "n_layer": 12,
   "n_positions": 1024,
   "pad_token_id": 50256,
   "problem_type": "single_label_classification",
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:77b039f86e720677f44e9b5e0f0c0d7fe843e6409e7a592c021baa646d653f4a
-size 333988796
+oid sha256:02f8d130e82b769d2a81578b05102432e714a655997cf0a2d0ad3f45ffdbf164
+size 510416641
runs/Jun02_22-27-20_umn-20230531-185317/events.out.tfevents.1685744843.umn-20230531-185317.44367.16 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:faa8d70fb2847a056ffd58d74a3d5dd5b654bd2e104227b5296fd44defaf4f45
-size 9943
+oid sha256:7a1736660d2182ee7ec481f3209f248fb2329483179758307572051ba6b3f392
+size 10297
runs/Jun02_23-43-24_umn-20230531-185317/1685749410.8519301/events.out.tfevents.1685749410.umn-20230531-185317.74031.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a337714f7447791b83b1f5422e576386d3ffb02d2707691a24bd46eb0a167c60
+size 5710
runs/Jun02_23-43-24_umn-20230531-185317/events.out.tfevents.1685749410.umn-20230531-185317.74031.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e041d109ed68191b32bf5a28688ef10dade35a1971e7786c1b3aaacedefc99a2
+size 4052
runs/Jun03_14-58-02_umn-20230531-185317/1685804287.3083808/events.out.tfevents.1685804287.umn-20230531-185317.217977.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f0a770600ba95a51823409a8749129dcb4150b18a70fd3c05ee3f74df55b9696
+size 5710
runs/Jun03_14-58-02_umn-20230531-185317/events.out.tfevents.1685804287.umn-20230531-185317.217977.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f707dbc9e17c57a38b4ebd23fbe8cd6e360ac89b6a68083879cc0aee007dd310
+size 4051
runs/Jun07_19-04-08_umn-20230531-185317/1686164651.7487478/events.out.tfevents.1686164651.umn-20230531-185317.2792.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:469a7c0b33fbb4b4a2a1f3900f6acfaf35292f87fc424141fef0871212436121
+size 5710
runs/Jun07_19-04-08_umn-20230531-185317/events.out.tfevents.1686164651.umn-20230531-185317.2792.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a3b0e825b3636f0bbdd9853f90a3c501e165c5d0eb357f03098e24d0ecbc303
+size 5120
tokenizer.json CHANGED
@@ -1,7 +1,19 @@
 {
   "version": "1.0",
-  "truncation": null,
-  "padding": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 1024,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": "BatchLongest",
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 50256,
+    "pad_type_id": 0,
+    "pad_token": "<|endoftext|>"
+  },
   "added_tokens": [
     {
       "id": 50256,
tokenizer_config.json CHANGED
@@ -3,7 +3,7 @@
   "bos_token": "<|endoftext|>",
   "eos_token": "<|endoftext|>",
   "model_max_length": 1024,
-  "name_or_path": "distilgpt2",
+  "name_or_path": "gpt2",
   "special_tokens_map_file": null,
   "tokenizer_class": "GPT2Tokenizer",
   "unk_token": "<|endoftext|>"
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:128404ea15fb549b26c6833a6d018d040ed2a2f3171630c53f8d17e64e0e14d3
+oid sha256:998bd43cf2f49b62dc891ee523396b9e89a04c31e4dfd04b96b4ad673f10066d
 size 3515
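
The commit message, training_args.bin, and the runs/*/events.out.tfevents.* files are the usual footprint of a transformers Trainer run that logs to TensorBoard and pushes checkpoints during training. A hedged sketch of that kind of setup, with a toy dataset and num_labels=2 standing in for the real (unshown) training data:

```python
# A hedged sketch of the kind of Trainer setup that leaves these artifacts behind
# ("Training in progress, epoch N" commits, training_args.bin, runs/*/events.out.tfevents.*).
# The toy dataset, output paths, and label count are illustrative assumptions only.
import torch
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)

tokenizer = AutoTokenizer.from_pretrained("gpt2")
tokenizer.pad_token = tokenizer.eos_token  # GPT-2 has no pad token; reuse <|endoftext|> (id 50256)

model = AutoModelForSequenceClassification.from_pretrained("gpt2", num_labels=2)
model.config.pad_token_id = tokenizer.pad_token_id


class ToyDataset(torch.utils.data.Dataset):
    """Placeholder data so the sketch runs end to end; not the real training set."""

    def __init__(self):
        enc = tokenizer(["a positive example", "a negative example"],
                        padding=True, truncation=True)
        self.rows = [
            {"input_ids": ids, "attention_mask": mask, "labels": label}
            for ids, mask, label in zip(enc["input_ids"], enc["attention_mask"], [1, 0])
        ]

    def __len__(self):
        return len(self.rows)

    def __getitem__(self, i):
        return self.rows[i]


args = TrainingArguments(
    output_dir="out",
    num_train_epochs=1,
    save_strategy="epoch",
    report_to="tensorboard",  # writes runs/<timestamp>/events.out.tfevents.* under output_dir
    push_to_hub=False,        # True (with a Hub repo) yields "Training in progress, epoch N" commits
)

trainer = Trainer(model=model, args=args, train_dataset=ToyDataset(), tokenizer=tokenizer)
trainer.train()  # training_args.bin is saved alongside each checkpoint
```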