adamjweintraut committed
Commit d86b849
1 Parent(s): 3d1e4d8

bart-finetuned-loaf-512-lyrictoplan

README.md CHANGED
@@ -15,7 +15,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [facebook/bart-large](https://huggingface.co/facebook/bart-large) on an unknown dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.4458
+- Loss: 0.3928
 
 ## Model description
 
@@ -47,12 +47,12 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:-----:|:----:|:---------------:|
-| 1.0218 | 0.45 | 500 | 0.5545 |
-| 0.5582 | 0.89 | 1000 | 0.5083 |
-| 0.4996 | 1.34 | 1500 | 0.4844 |
-| 0.4744 | 1.78 | 2000 | 0.4609 |
-| 0.449 | 2.23 | 2500 | 0.4560 |
-| 0.4287 | 2.67 | 3000 | 0.4458 |
+| 0.7003 | 0.45 | 500 | 0.5146 |
+| 0.5357 | 0.89 | 1000 | 0.4827 |
+| 0.4799 | 1.34 | 1500 | 0.4387 |
+| 0.4354 | 1.78 | 2000 | 0.4132 |
+| 0.4047 | 2.23 | 2500 | 0.4049 |
+| 0.3739 | 2.67 | 3000 | 0.3928 |
 
 
 ### Framework versions
generation_config.json CHANGED
@@ -8,14 +8,14 @@
   "forced_bos_token_id": 0,
   "forced_eos_token_id": 2,
   "max_new_tokens": 512,
-  "min_new_tokens": 128,
+  "min_new_tokens": 432,
   "n_examples": null,
   "no_repeat_ngram_size": 3,
   "num_beams": 4,
   "pad_token_id": 1,
   "padding": "max_length",
   "renormalize_logits": true,
-  "repetition_penalty": 0.6,
+  "repetition_penalty": 0.4,
   "skip_special_tokens": true,
   "temperature": 0.85,
   "top_k": 0,
runs/Apr09_07-06-43_870b8dce1033/events.out.tfevents.1712646406.870b8dce1033.46497.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6c638f8c384a212ae5db7056fa2811fd9e3beb2e024556d8944eff0075bddc8c
-size 9033
+oid sha256:ed393bd04a082b5e8d71507b63b5d0b08f491b18c02e37a4d7dfdba25dff88ee
+size 9387