danielkty22 committed
Commit 45a5bb9
1 parent: 811017a

Training in progress, epoch 3

Files changed (3):
  1. config.json +39 -0
  2. model.safetensors +3 -0
  3. training_args.bin +2 -2
config.json CHANGED
@@ -0,0 +1,39 @@
+ {
+   "_name_or_path": "gpt2",
+   "activation_function": "gelu_new",
+   "architectures": [
+     "GPT2ForQuestionAnswering"
+   ],
+   "attn_pdrop": 0.1,
+   "bos_token_id": 50256,
+   "embd_pdrop": 0.1,
+   "eos_token_id": 50256,
+   "initializer_range": 0.02,
+   "layer_norm_epsilon": 1e-05,
+   "model_type": "gpt2",
+   "n_ctx": 1024,
+   "n_embd": 768,
+   "n_head": 12,
+   "n_inner": null,
+   "n_layer": 12,
+   "n_positions": 1024,
+   "reorder_and_upcast_attn": false,
+   "resid_pdrop": 0.1,
+   "scale_attn_by_inverse_layer_idx": false,
+   "scale_attn_weights": true,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "task_specific_params": {
+     "text-generation": {
+       "do_sample": true,
+       "max_length": 50
+     }
+   },
+   "torch_dtype": "float32",
+   "transformers_version": "4.36.1",
+   "use_cache": true,
+   "vocab_size": 50257
+ }
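
The new config declares a GPT2ForQuestionAnswering head on the stock gpt2 architecture (12 layers, 768-dim embeddings, 1024-token context). A minimal sketch of loading and querying the checkpoint, assuming the files from this commit sit in a local directory; the `./checkpoint` path and the question/context strings are placeholders, not part of the commit:

```python
import torch
from transformers import AutoTokenizer, GPT2ForQuestionAnswering

# Placeholder path: a directory holding the config.json and
# model.safetensors added in this commit.
model = GPT2ForQuestionAnswering.from_pretrained("./checkpoint")
# The checkpoint was initialized from "gpt2" (see "_name_or_path"),
# so the stock gpt2 tokenizer matches its 50257-token vocabulary.
tokenizer = AutoTokenizer.from_pretrained("gpt2")

question = "Who trained GPT-2?"  # placeholder example inputs
context = "GPT-2 is a language model trained by OpenAI on WebText."
inputs = tokenizer(question, context, return_tensors="pt")

with torch.no_grad():
    outputs = model(**inputs)

# Extractive QA: the head predicts start/end logits over the input tokens;
# the answer is the span between the two argmax positions.
start = outputs.start_logits.argmax(dim=-1).item()
end = outputs.end_logits.argmax(dim=-1).item()
print(tokenizer.decode(inputs["input_ids"][0][start : end + 1]))
```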
model.safetensors CHANGED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a8b25c16aca213616f9257ce905b26b937d92e33070425b6e3412f6d1ab7abc
+ size 497780512
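
model.safetensors is tracked with Git LFS: the three lines above are the pointer file itself, while the actual ~498 MB weights live in LFS storage, addressed by the sha256 oid. A small sketch, assuming the real file has been fetched (e.g. with `git lfs pull`), that checks it against the recorded oid and loads the tensors:

```python
import hashlib

from safetensors.torch import load_file

# Hash the downloaded file and compare against the oid in the LFS pointer.
with open("model.safetensors", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
assert digest == "3a8b25c16aca213616f9257ce905b26b937d92e33070425b6e3412f6d1ab7abc"

# safetensors stores a flat name -> tensor mapping, with no pickle involved.
state_dict = load_file("model.safetensors")
print(len(state_dict), "tensors loaded")
```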
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0a5d8cb9be9c6b9f2cb1bd81e2a2296ee793bd611e7a3c4b885ee9f48e432d29
- size 4664
+ oid sha256:d08dadfd37d7f7dc7ab35271efebcdd4c3dbc9a5aa59d3b797e2ef1b653fd60b
+ size 4920
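
training_args.bin is the torch-pickled TrainingArguments object that the Hugging Face Trainer writes alongside each checkpoint; the changed oid and size here indicate the arguments differ from the previous commit's run. A sketch for inspecting it, with the caveat that unpickling executes arbitrary code, so only load files you trust:

```python
import torch

# TrainingArguments is a plain pickled object, so weights_only must be
# False (requires transformers installed at unpickle time).
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```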