debbiesoon committed
Commit 0bacb28
1 Parent(s): 5b86415

Training in progress, step 10

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "debbiesoon/summarise",
+  "_name_or_path": "allenai/led-base-16384",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "architectures": [
@@ -45,7 +45,7 @@
   "length_penalty": 2.0,
   "max_decoder_position_embeddings": 1024,
   "max_encoder_position_embeddings": 16384,
-  "max_length": 1000,
+  "max_length": 384,
   "min_length": 100,
   "model_type": "led",
   "no_repeat_ngram_size": 3,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8f91e68d6f0b4964a1452e59408a7700c97252caaa9b7d9a956d13ebf07460fa
-size 647674353
+oid sha256:354b45fd1dcbc5ead9aa6d574d57f797e7a526cbf4c63c4e516be5198cbc1550
+size 647678513
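The weights file is stored as a Git LFS pointer (version, oid, size) rather than the binary itself. A small sketch, assuming the real pytorch_model.bin has already been downloaded locally, of checking the file against the pointer above:

```python
# Sketch: verify a locally downloaded pytorch_model.bin against the
# sha256 and size recorded in the Git LFS pointer shown above.
import hashlib
import os

EXPECTED_SHA256 = "354b45fd1dcbc5ead9aa6d574d57f797e7a526cbf4c63c4e516be5198cbc1550"
EXPECTED_SIZE = 647678513
path = "pytorch_model.bin"  # assumed local path after `git lfs pull` or a hub download

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert h.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```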
runs/Oct20_04-39-07_6ebe326e3fd0/1666240988.9250832/events.out.tfevents.1666240988.6ebe326e3fd0.102.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e0d273fa5f6e2690045464f6cf0ae1ed0b13f8b0a47a7632d20b0e559c7249d0
+size 5633
runs/Oct20_04-39-07_6ebe326e3fd0/events.out.tfevents.1666240988.6ebe326e3fd0.102.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:428ce84a8c7d33cbfe9a6a1c35cef197acd40c5d57c42ea2b647eb391f470bac
+size 5113
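The two files added under runs/ are TensorBoard event logs written during training. A sketch of reading them locally, assuming the tensorboard package is installed; the log directory path is the one from this commit:

```python
# Sketch: inspect the TensorBoard event files added under runs/.
# Assumes the `tensorboard` package; which tags exist depends on the training run.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

log_dir = "runs/Oct20_04-39-07_6ebe326e3fd0"
ea = EventAccumulator(log_dir)
ea.Reload()

print(ea.Tags())  # available scalar/tensor/text tags
for tag in ea.Tags().get("scalars", []):
    for event in ea.Scalars(tag):
        print(tag, event.step, event.value)
```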
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
 {
   "version": "1.0",
-  "truncation": null,
-  "padding": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 384,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 384
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 1,
+    "pad_type_id": 0,
+    "pad_token": "<pad>"
+  },
   "added_tokens": [
     {
       "id": 0,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:77a7b7ca9c2c1ce32ea8e431c10656e0c7ebf1b87e9f0937c796082d8611c058
+oid sha256:1f3adc4dcf3a3adb9032416693194ffbea7d4c451450c5e044485494f4031f42
 size 3439
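training_args.bin is the pickled TrainingArguments object the Trainer saves alongside checkpoints; only its LFS pointer changed here. A sketch of inspecting it locally, assuming torch and transformers are installed (weights_only=False is needed on recent torch releases because the file is not a plain tensor archive):

```python
# Sketch: load the pickled TrainingArguments saved by the Trainer.
# Assumes torch and transformers are installed and the file was downloaded locally.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args))                     # transformers TrainingArguments
print(args.output_dir, args.save_steps)
```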