dat committed on
Commit d8a2b47
1 Parent(s): 0547e9f

Saving weights and logs of step 150000

Load data & train tokenizer.ipynb CHANGED
@@ -54,12 +54,38 @@
   },
   {
   "cell_type": "code",
- "execution_count": 2,
+ "execution_count": null,
   "id": "348a4dd4",
   "metadata": {},
   "outputs": [],
   "source": [
- "tokenizer = AutoTokenizer.from_pretrained(\"./\")"
+ "from transformers import pipeline, AutoTokenizer, FlaxBigBirdForMaskedLM\n",
+ "tokenizer = AutoTokenizer.from_pretrained(\".\")\n",
+ "model = FlaxBigBirdForMaskedLM.from_pretrained(\".\")\n",
+ "\n",
+ "unmasker = pipeline('fill-mask', tokenizer=tokenizer, model=model)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "4b1bb489",
+ "metadata": {},
+ "outputs": [
+ {
+ "ename": "NameError",
+ "evalue": "name 'unmasker' is not defined",
+ "output_type": "error",
+ "traceback": [
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+ "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
+ "\u001b[0;32m/tmp/ipykernel_759685/2003099555.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0munmasker\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"test\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
+ "\u001b[0;31mNameError\u001b[0m: name 'unmasker' is not defined"
+ ]
+ }
+ ],
+ "source": [
+ "unmasker(\"test\")"
   ]
   },
   {
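
The new notebook cells load the tokenizer and the Flax BigBird checkpoint from the repository root and build a fill-mask pipeline; the NameError captured in the cell output only reflects that unmasker("test") was run before the cell defining unmasker. Below is a minimal standalone sketch of the same flow, assuming the checkpoint files sit in the current directory; it queries the masked-LM head directly instead of going through pipeline, and the example sentence is arbitrary.

# Sketch: load the committed tokenizer and Flax checkpoint from "." and
# query the masked-LM head directly. Only tokenizer.mask_token /
# mask_token_id are relied on; the input sentence is an arbitrary example.
import numpy as np
from transformers import AutoTokenizer, FlaxBigBirdForMaskedLM

tokenizer = AutoTokenizer.from_pretrained(".")
model = FlaxBigBirdForMaskedLM.from_pretrained(".")

inputs = tokenizer(f"The missing word goes {tokenizer.mask_token} in this sentence.",
                   return_tensors="np")
logits = model(**inputs).logits  # shape: (batch, seq_len, vocab_size)

# Locate the mask position and print the five most likely replacement tokens.
mask_pos = int(np.argmax(inputs["input_ids"][0] == tokenizer.mask_token_id))
top_ids = np.asarray(logits[0, mask_pos]).argsort()[::-1][:5]
print(tokenizer.convert_ids_to_tokens(top_ids.tolist()))
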
checkpoint_150000 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5e3a86c577a503cb52846a97cb2b56106974afab3bedc8617a0c5bc1e89907ea
+ size 1530270447
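
checkpoint_150000 is committed as a Git LFS pointer (version / oid / size), not the ~1.5 GB blob itself. A small sketch, assuming the file has been materialised locally (e.g. after git lfs pull), for checking that the resolved blob matches the pointer's sha256 and size:

# Sketch: verify a locally resolved LFS object against the pointer fields above.
# "checkpoint_150000" is assumed to be the materialised file, not the pointer.
import hashlib

def sha256_and_size(path: str, chunk: int = 1 << 20):
    """Stream the file and return (hex sha256 digest, byte count)."""
    h, n = hashlib.sha256(), 0
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(chunk), b""):
            h.update(block)
            n += len(block)
    return h.hexdigest(), n

digest, size = sha256_and_size("checkpoint_150000")
print(digest == "5e3a86c577a503cb52846a97cb2b56106974afab3bedc8617a0c5bc1e89907ea",
      size == 1530270447)
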
config.json CHANGED
@@ -1,5 +1,4 @@
  {
- "_name_or_path": ".",
  "architectures": [
  "BigBirdForMaskedLM"
  ],
@@ -24,7 +23,6 @@
  "position_embedding_type": "absolute",
  "rescale_embeddings": false,
  "sep_token_id": 66,
- "torch_dtype": "float32",
  "transformers_version": "4.9.0.dev0",
  "type_vocab_size": 2,
  "use_bias": true,
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:29ed0773269721507328c69a2ccd94dd845503587ae4a89525ec4f204ab05f0d
+ oid sha256:0ec0fb67c52b43bbdf61a82273bba1919d00f297e8de79d79148b5f14a013a41
  size 510090043
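
flax_model.msgpack holds the serialized Flax parameter tree (same 510 MB size, new oid). A sketch, assuming the file is present locally, for deserializing it without instantiating the model:

# Sketch: restore the raw parameter tree from the msgpack file and list its
# top-level parameter groups; no model class is needed for this.
from flax import serialization

with open("flax_model.msgpack", "rb") as f:
    params = serialization.msgpack_restore(f.read())

print(list(params.keys()))  # e.g. the embedding/encoder and MLM-head groups
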
wandb/run-20210715_185845-dq8uirtg/files/output.log CHANGED
The diff for this file is too large to render. See raw diff
 
wandb/run-20210715_185845-dq8uirtg/files/wandb-summary.json CHANGED
@@ -1 +1 @@
- {"training_step": 136400, "learning_rate": 2.4548056899220683e-05, "train_loss": 2.1764094829559326, "_runtime": 15833, "_timestamp": 1626391358, "_step": 733, "eval_step": 132000, "eval_accuracy": 0.5815154314041138, "eval_loss": 2.216282367706299}
+ {"training_step": 150000, "learning_rate": 2.396145646343939e-05, "train_loss": 2.3190174102783203, "_runtime": 21476, "_timestamp": 1626397001, "_step": 1008, "eval_step": 150000, "eval_accuracy": 0.5905819535255432, "eval_loss": 2.15248703956604}
wandb/run-20210715_185845-dq8uirtg/logs/debug-internal.log CHANGED
The diff for this file is too large to render. See raw diff
 
wandb/run-20210715_185845-dq8uirtg/run-dq8uirtg.wandb CHANGED
Binary files a/wandb/run-20210715_185845-dq8uirtg/run-dq8uirtg.wandb and b/wandb/run-20210715_185845-dq8uirtg/run-dq8uirtg.wandb differ