tomwetherell committed on
Commit 4d7946b
1 Parent(s): c5ce307

Training in progress, epoch 1

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,53 @@
+ {
+   "_name_or_path": "deepmind/language-perceiver",
+   "architectures": [
+     "PerceiverForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "audio_samples_per_frame": 1920,
+   "cross_attention_shape_for_attention": "kv",
+   "cross_attention_widening_factor": 1,
+   "d_latents": 1280,
+   "d_model": 768,
+   "hidden_act": "gelu",
+   "id2label": {
+     "0": "negative",
+     "1": "neutral",
+     "2": "positive"
+   },
+   "image_size": 56,
+   "initializer_range": 0.02,
+   "label2id": {
+     "negative": 0,
+     "neutral": 1,
+     "positive": 2
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 2048,
+   "model_type": "perceiver",
+   "num_blocks": 1,
+   "num_cross_attention_heads": 8,
+   "num_frames": 16,
+   "num_latents": 256,
+   "num_self_attends_per_block": 26,
+   "num_self_attention_heads": 8,
+   "output_shape": [
+     1,
+     16,
+     224,
+     224
+   ],
+   "problem_type": "single_label_classification",
+   "qk_channels": 256,
+   "samples_per_patch": 16,
+   "self_attention_widening_factor": 1,
+   "torch_dtype": "float32",
+   "train_size": [
+     368,
+     496
+   ],
+   "transformers_version": "4.15.0",
+   "use_query_residual": true,
+   "v_channels": 1280,
+   "vocab_size": 262
+ }
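
The config above puts a three-way sentiment head (negative/neutral/positive) on top of deepmind/language-perceiver. As a minimal sketch of how this checkpoint could be loaded for inference once the repository is cloned locally (the local path and the example sentence are placeholders, not part of this commit):

import torch
from transformers import PerceiverTokenizer, PerceiverForSequenceClassification

checkpoint = "path/to/this/repo"  # placeholder: a local clone of this repository
tokenizer = PerceiverTokenizer.from_pretrained(checkpoint)
model = PerceiverForSequenceClassification.from_pretrained(checkpoint)
model.eval()

# The language Perceiver consumes UTF-8 bytes directly (vocab_size 262),
# here padded out to max_position_embeddings = 2048.
enc = tokenizer("The film was surprisingly good.", padding="max_length", return_tensors="pt")

with torch.no_grad():
    logits = model(inputs=enc.input_ids, attention_mask=enc.attention_mask).logits

# id2label in config.json maps 0/1/2 to negative/neutral/positive.
print(model.config.id2label[logits.argmax(-1).item()])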
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:623aa51743093fa3a17f0339e4773f1bb8467c06175cacd06d573e83b1309c1e
+ size 824551789
runs/Jan20_17-25-37_636afc414cc8/1642699563.4359243/events.out.tfevents.1642699563.636afc414cc8.62.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bcac2a7d6aab1d8ba5b8657d830c19cbfe0c6d2b92343ac2c6dbc255586de359
+ size 4713
runs/Jan20_17-25-37_636afc414cc8/events.out.tfevents.1642699563.636afc414cc8.62.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c575a7cbdeb65cd72ed2fcd2d734136b195c5f3127fa2ed6511f2613584b7ed
+ size 12717
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": {"content": "[BOS]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "eos_token": {"content": "[EOS]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "sep_token": {"content": "[SEP]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "pad_token": {"content": "[PAD]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "cls_token": {"content": "[CLS]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "mask_token": {"content": "[MASK]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"pad_token": {"content": "[PAD]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": {"content": "[BOS]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "[EOS]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "mask_token": {"content": "[MASK]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "cls_token": {"content": "[CLS]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "sep_token": {"content": "[SEP]", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "model_max_length": 2048, "special_tokens_map_file": "/root/.cache/huggingface/transformers/1e672f45699664b4878133bd3625998f38651ea1b4f36cb5722aff0fe0404bd8.7ff539969264e209ec2a7e131831ba24df744708e52840079a10a7a1412761fb", "tokenizer_file": null, "name_or_path": "deepmind/language-perceiver", "tokenizer_class": "PerceiverTokenizer"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4200fa34c79027f96b45f532e3b915bcf4d306126fcccc0f3d7892badcc9b29b
+ size 2927