100daggers committed
Commit 02610e9 · verified · 1 Parent(s): 54c6c3a

Training in progress, step 286

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "gpt2",
+  "_name_or_path": "100daggers/reuters-gpt2-text-gen",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:465a5bc9834d9b20145770a46bb03d598ecedd398b501bd33e5cb3f4ff74b733
+oid sha256:53256dd612a9673284d09d625021075f00d9bc6304c6473100224bb06723194d
 size 503128704
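The weights are stored as a Git LFS pointer, so only the sha256 digest and size appear in the diff; the size is unchanged (503128704 bytes) because only the tensor values moved. A sketch for verifying a downloaded copy against the new pointer:

```python
import hashlib

# Expected digest is the new "oid sha256:..." value from the pointer above.
expected = "53256dd612a9673284d09d625021075f00d9bc6304c6473100224bb06723194d"

h = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == expected, "file does not match the LFS pointer"
```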
runs/Aug22_13-13-29_cycle-gan-2-40gb-0/events.out.tfevents.1724325211.cycle-gan-2-40gb-0.6027.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1f5d84f440c49be22aab973d7587f94a85ae33f6e27cde6cd49b18aff6fecabb
+size 11099
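The added file is a TensorBoard event log (the run directory name encodes the host and launch time; 1724325211 is the Unix start timestamp). A sketch for reading its scalars, assuming the tensorboard package is installed; the tag names are assumptions, so list them first:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/Aug22_13-13-29_cycle-gan-2-40gb-0")
ea.Reload()

print(ea.Tags()["scalars"])             # discover what was logged
for event in ea.Scalars("train/loss"):  # "train/loss" is an assumed tag name
    print(event.step, event.value)
```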
special_tokens_map.json CHANGED
@@ -13,7 +13,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<|endoftext|>",
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
   "unk_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -13,8 +13,12 @@
   "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|endoftext|>",
+  "max_length": 512,
   "model_max_length": 1024,
   "pad_token": "<|endoftext|>",
+  "stride": 0,
   "tokenizer_class": "GPT2Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<|endoftext|>"
 }
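The new keys mirror truncation settings used during preprocessing. A sketch of the tokenizer call they would correspond to (the sample text is a placeholder, not from the training data):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("100daggers/reuters-gpt2-text-gen")
tok.truncation_side = "right"       # "truncation_side": "right"

enc = tok(
    "Some Reuters article text...",
    truncation=True,                # "truncation_strategy": "longest_first"
    max_length=512,                 # "max_length": 512
    stride=0,                       # "stride": 0
    return_tensors="pt",
)
print(enc["input_ids"].shape)
```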
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:26856072ad5665e5b25cee14716f63acdca4b9dd1a50ed271a66a5f83226b73d
+oid sha256:4601a3d752b81e244b72521bb9f3e52f5ed44160b466dabee765129dfd69275f
 size 4795
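training_args.bin is the pickled TrainingArguments object the Trainer saves alongside each checkpoint; here the digest changed while the size (4795 bytes) stayed the same. A sketch for inspecting it locally:

```python
import torch

# weights_only=False is required on recent PyTorch because this is a pickled
# Python object, not a tensor file.
args = torch.load("training_args.bin", weights_only=False)
print(args.output_dir, args.learning_rate, args.per_device_train_batch_size)
```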