jkeisling commited on
Commit
fb24f54
·
1 Parent(s): cdbe788

Initial commit: Port over untracked work in flight

Browse files
.gitignore CHANGED
@@ -1,3 +1,7 @@
 
 
 
 
1
  # Byte-compiled / optimized / DLL files
2
  __pycache__/
3
  *.py[cod]
 
1
+ # Large files
2
+ checkpoints/
3
+ datasets/
4
+
5
  # Byte-compiled / optimized / DLL files
6
  __pycache__/
7
  *.py[cod]
gpt.ipynb ADDED
@@ -0,0 +1,708 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "attachments": {},
5
+ "cell_type": "markdown",
6
+ "metadata": {},
7
+ "source": [
8
+ "# Learn GPT from scratch"
9
+ ]
10
+ },
11
+ {
12
+ "cell_type": "code",
13
+ "execution_count": 36,
14
+ "metadata": {},
15
+ "outputs": [],
16
+ "source": [
17
+ "import os\n",
18
+ "\n",
19
+ "# We always start with a dataset to train on. Let's download the tiny shakespeare dataset\n",
20
+ "if not os.path.isfile(\"./datasets/corpora/shakespeare.txt\"):\n",
21
+ " !wget https://raw.githubusercontent.com/karpathy/char-rnn/master/data/tinyshakespeare/input.txt -O datasets/corpora/shakespeare.txt"
22
+ ]
23
+ },
24
+ {
25
+ "cell_type": "code",
26
+ "execution_count": 10,
27
+ "metadata": {},
28
+ "outputs": [],
29
+ "source": [
30
+ "with open(\"datasets/corpora/shakespeare.txt\", 'r', encoding='utf-8') as f:\n",
31
+ " text = f.read()"
32
+ ]
33
+ },
34
+ {
35
+ "attachments": {},
36
+ "cell_type": "markdown",
37
+ "metadata": {},
38
+ "source": [
39
+ "## Tokenization and dataset creation"
40
+ ]
41
+ },
42
+ {
43
+ "cell_type": "code",
44
+ "execution_count": 11,
45
+ "metadata": {},
46
+ "outputs": [
47
+ {
48
+ "data": {
49
+ "text/plain": [
50
+ "<torch._C.Generator at 0x7f7b543cb430>"
51
+ ]
52
+ },
53
+ "execution_count": 11,
54
+ "metadata": {},
55
+ "output_type": "execute_result"
56
+ }
57
+ ],
58
+ "source": [
59
+ "import torch\n",
60
+ "import torch.nn as nn\n",
61
+ "import torch.optim as optim\n",
62
+ "from torch.optim import lr_scheduler\n",
63
+ "import torch.nn.functional as F\n",
64
+ "from torch.utils.data import Dataset, DataLoader, TensorDataset, random_split\n",
65
+ "import pandas as pd\n",
66
+ "import numpy as np\n",
67
+ "import math\n",
68
+ "\n",
69
+ "torch.manual_seed(1337)"
70
+ ]
71
+ },
72
+ {
73
+ "cell_type": "code",
74
+ "execution_count": 12,
75
+ "metadata": {},
76
+ "outputs": [],
77
+ "source": [
78
+ "# Simple dumb ASCII character-level \"encoding\" since all training data is ASCII\n",
79
+ "def encode_text(text):\n",
80
+ " return([ord(t) for t in text])\n",
81
+ "\n",
82
+ "def decode_text(indices):\n",
83
+ " return([chr(x) for x in indices])\n"
84
+ ]
85
+ },
86
+ {
87
+ "cell_type": "code",
88
+ "execution_count": 13,
89
+ "metadata": {},
90
+ "outputs": [],
91
+ "source": [
92
+ "# Tensorify data, put it in dataset\n",
93
+ "data = torch.tensor(encode_text(text), dtype=torch.int32)\n",
94
+ "\n",
95
+ "split_idx = int(0.9 * len(data))\n",
96
+ "train_data = data[:split_idx]\n",
97
+ "test_data = data[split_idx:]"
98
+ ]
99
+ },
100
+ {
101
+ "attachments": {},
102
+ "cell_type": "markdown",
103
+ "metadata": {},
104
+ "source": [
105
+ "We have to make a custom PyTorch dataset class to automatically generate the \"context\" windows at load time. This allows us to avoid keeping these windows around in memory when not in use:"
106
+ ]
107
+ },
108
+ {
109
+ "cell_type": "code",
110
+ "execution_count": 31,
111
+ "metadata": {},
112
+ "outputs": [],
113
+ "source": [
114
+ "class TextDataset(Dataset):\n",
115
+ " def __init__(self, data_tensor, context_size):\n",
116
+ " self.data_tensor = data_tensor\n",
117
+ " self.context_size = context_size\n",
118
+ " \n",
119
+ " def __len__(self):\n",
120
+ " return len(self.data_tensor)\n",
121
+ "\n",
122
+ " def __getitem__(self, index):\n",
123
+ " if index < self.context_size:\n",
124
+ " x = F.pad(self.data_tensor[:index], (self.context_size - index, 0), value=0)\n",
125
+ " else:\n",
126
+ " x = self.data_tensor[index - self.context_size:index]\n",
127
+ " \n",
128
+ " y = self.data_tensor[index]\n",
129
+ " return x, y"
130
+ ]
131
+ },
132
+ {
133
+ "attachments": {},
134
+ "cell_type": "markdown",
135
+ "metadata": {},
136
+ "source": [
137
+ "NOTE 2023-03-25: I think this is bugged, and that's the reason the training loss is so damn high. Testing:"
138
+ ]
139
+ },
140
+ {
141
+ "cell_type": "code",
142
+ "execution_count": 34,
143
+ "metadata": {},
144
+ "outputs": [
145
+ {
146
+ "name": "stdout",
147
+ "output_type": "stream",
148
+ "text": [
149
+ "Step 0:\n",
150
+ "[0, 0, 0, 0, 0, 0, 0, 0]\n",
151
+ "---\n",
152
+ "[0, 0, 0, 0, 0, 0, 0, 70]\n",
153
+ "---\n",
154
+ "['F', 'i']\n",
155
+ "Step 1:\n",
156
+ "[0, 0, 0, 0, 0, 0, 70, 105]\n",
157
+ "---\n",
158
+ "[0, 0, 0, 0, 0, 70, 105, 114]\n",
159
+ "---\n",
160
+ "['r', 's']\n",
161
+ "Step 2:\n",
162
+ "[0, 0, 0, 0, 70, 105, 114, 115]\n",
163
+ "---\n",
164
+ "[0, 0, 0, 70, 105, 114, 115, 116]\n",
165
+ "---\n",
166
+ "['t', ' ']\n",
167
+ "Step 3:\n",
168
+ "[0, 0, 70, 105, 114, 115, 116, 32]\n",
169
+ "---\n",
170
+ "[0, 70, 105, 114, 115, 116, 32, 67]\n",
171
+ "---\n",
172
+ "['C', 'i']\n",
173
+ "Step 4:\n",
174
+ "[70, 105, 114, 115, 116, 32, 67, 105]\n",
175
+ "---\n",
176
+ "[105, 114, 115, 116, 32, 67, 105, 116]\n",
177
+ "---\n",
178
+ "['t', 'i']\n",
179
+ "Step 5:\n",
180
+ "[114, 115, 116, 32, 67, 105, 116, 105]\n",
181
+ "---\n",
182
+ "[115, 116, 32, 67, 105, 116, 105, 122]\n",
183
+ "---\n",
184
+ "['z', 'e']\n"
185
+ ]
186
+ }
187
+ ],
188
+ "source": [
189
+ "train_dataset = TextDataset(train_data, 8)\n",
190
+ "train_dataloader = DataLoader(train_dataset, batch_size=2, shuffle=False)\n",
191
+ "\n",
192
+ "step = 0\n",
193
+ "for x, y in train_dataloader:\n",
194
+ " print(f\"Step {step}:\")\n",
195
+ " for b in x.tolist():\n",
196
+ " print(b)\n",
197
+ " print(\"---\")\n",
198
+ "\n",
199
+ " print(decode_text(y.tolist()))\n",
200
+ " step += 1\n",
201
+ " if step > 5:\n",
202
+ " break\n",
203
+ "\n"
204
+ ]
205
+ },
206
+ {
207
+ "attachments": {},
208
+ "cell_type": "markdown",
209
+ "metadata": {},
210
+ "source": [
211
+ "## Attention is all you need (注目こそが必要なすべて)"
212
+ ]
213
+ },
214
+ {
215
+ "cell_type": "code",
216
+ "execution_count": 8,
217
+ "metadata": {},
218
+ "outputs": [],
219
+ "source": [
220
+ "class MultiheadAttention(nn.Module):\n",
221
+ " def __init__(self, embed_dim, num_heads, dropout=0.0, bias=True, device=None, dtype=None):\n",
222
+ " super(MultiheadAttention, self).__init__()\n",
223
+ "\n",
224
+ " # Save variables\n",
225
+ " self.embed_dim = embed_dim\n",
226
+ " self.num_heads = num_heads\n",
227
+ " self.d_k = embed_dim // num_heads\n",
228
+ "\n",
229
+ " self.Q = nn.Linear(embed_dim, embed_dim, bias=bias)\n",
230
+ " self.K = nn.Linear(embed_dim, embed_dim, bias=bias)\n",
231
+ " self.V = nn.Linear(embed_dim, embed_dim, bias=bias)\n",
232
+ "\n",
233
+ " self.dropout = nn.Dropout(dropout)\n",
234
+ " self.out_proj = nn.Linear(embed_dim, embed_dim, bias=False)\n",
235
+ " nn.init.kaiming_normal_(self.out_proj.weight, mode='fan_in', nonlinearity='linear')\n",
236
+ "\n",
237
+ " def forward(self, query, key, value, key_padding_mask=None):\n",
238
+ " batch_size = query.size(0)\n",
239
+ "\n",
240
+ " # Apply linear layers\n",
241
+ " q = self.Q(query) # [B, C, E]\n",
242
+ " k = self.K(key) # [B, C, E]\n",
243
+ " v = self.V(value) # [B, C, E]\n",
244
+ "\n",
245
+ " # Mutate dimensions so the attention matmul can get rid of the inner d_k\n",
246
+ " q = q.view(batch_size, -1, self.num_heads, self.d_k).transpose(1, 2) # [batch_size, num_heads, C, d_k]\n",
247
+ " k = k.view(batch_size, -1, self.num_heads, self.d_k).transpose(1, 2) # [batch_size, num_heads, C, d_k]\n",
248
+ " v = v.view(batch_size, -1, self.num_heads, self.d_k).transpose(1, 2) # [batch_size, num_heads, C, d_k]\n",
249
+ " \n",
250
+ " # Get raw attention scores\n",
251
+ " scores = torch.matmul(q, k.transpose(-2, -1)) / math.sqrt(self.d_k) # [B, num_heads, C, C]\n",
252
+ "\n",
253
+ " # Apply mask, if necessary\n",
254
+ " if key_padding_mask is not None:\n",
255
+ " \"\"\"\n",
256
+ " MAY BE WORTH DEBUGGING\n",
257
+ "\n",
258
+ " if key_padding_mask.dim() == 3:\n",
259
+ " # If the mask is 3D, add an extra dimension for the num_heads\n",
260
+ " key_padding_mask = key_padding_mask.unsqueeze(1) # [batch_size, 1, seq_len, seq_len]\n",
261
+ " else:\n",
262
+ " # If the mask is 2D, add dimensions for the num_heads and the 'query' sequence length\n",
263
+ " key_padding_mask = key_padding_mask.unsqueeze(1).unsqueeze(2) # [batch_size, 1, 1, seq_len]\n",
264
+ " \"\"\"\n",
265
+ " # Apply the mask to attention scores\n",
266
+ " scores = scores.masked_fill(key_padding_mask, float('-inf'))\n",
267
+ "\n",
268
+ " # Scale by sqrt(k)\n",
269
+ " attn = F.softmax(scores, dim=-1)\n",
270
+ " attn = self.dropout(attn)\n",
271
+ " out = attn @ v # [B, num_heads, C, d_k]\n",
272
+ "\n",
273
+ " # Concat and project\n",
274
+ " # Swap C and num_heads, force memory to coalesce, then fuse back num_heads and d_k together\n",
275
+ " out = out.transpose(1, 2).contiguous().view(batch_size, -1, self.embed_dim)\n",
276
+ " # Project: give attention \"time to think\". Maybe this should be part of a different module but whatever\n",
277
+ " out = self.out_proj(out)\n",
278
+ " return(out)\n",
279
+ "\n"
280
+ ]
281
+ },
282
+ {
283
+ "cell_type": "code",
284
+ "execution_count": 9,
285
+ "metadata": {},
286
+ "outputs": [],
287
+ "source": [
288
+ "class FeedForward(nn.Module):\n",
289
+ " def __init__(self, embed_dim, dropout):\n",
290
+ " super().__init__()\n",
291
+ " self.net = nn.Sequential(\n",
292
+ " nn.Linear(embed_dim, 4 * embed_dim),\n",
293
+ " nn.ReLU(),\n",
294
+ " nn.Dropout(dropout),\n",
295
+ " nn.Linear(4 * embed_dim, embed_dim),\n",
296
+ " )\n",
297
+ "\n",
298
+ " def forward(self, x):\n",
299
+ " return(self.net(x))"
300
+ ]
301
+ },
302
+ {
303
+ "cell_type": "code",
304
+ "execution_count": 10,
305
+ "metadata": {},
306
+ "outputs": [],
307
+ "source": [
308
+ "class Block(nn.Module):\n",
309
+ " \"\"\"Self-attention\"\"\"\n",
310
+ " def __init__(self, embed_dim, num_heads, mask, dropout=0.2):\n",
311
+ " super(Block, self).__init__() \n",
312
+ " self.register_buffer(\"mask\", mask)\n",
313
+ " self.head = MultiheadAttention(embed_dim=embed_dim, num_heads=num_heads, dropout=dropout)\n",
314
+ " self.ffwd = FeedForward(embed_dim=embed_dim, dropout=dropout)\n",
315
+ " self.ln1 = nn.LayerNorm(embed_dim)\n",
316
+ " self.ln2 = nn.LayerNorm(embed_dim)\n",
317
+ "\n",
318
+ " def forward(self, x):\n",
319
+ " # Residual connections\n",
320
+ " x = self.ln1(x)\n",
321
+ " x = x + self.head.forward(x, x, x, key_padding_mask=self.mask) \n",
322
+ " out = x + self.ffwd(self.ln2(x))\n",
323
+ " return out\n"
324
+ ]
325
+ },
326
+ {
327
+ "cell_type": "code",
328
+ "execution_count": 11,
329
+ "metadata": {},
330
+ "outputs": [],
331
+ "source": [
332
+ "class GPT(nn.Module):\n",
333
+ " def __init__(self, embedding_dim, vocab_size, context_size, lr=1e-3):\n",
334
+ " # Inherit PyTorch stuff\n",
335
+ " super(GPT, self).__init__()\n",
336
+ "\n",
337
+ " # Save variables for later\n",
338
+ " self.embedding_dim = embedding_dim\n",
339
+ " self.output_dim = vocab_size\n",
340
+ " self.context_size = context_size\n",
341
+ "\n",
342
+ " # Initialize layers. Sadly this breaks the whole \"self.layers: concept but whatever\n",
343
+ " self.tok_embed = nn.Embedding(vocab_size, embedding_dim)\n",
344
+ " self.pos_embed = nn.Embedding(context_size, embedding_dim)\n",
345
+ "\n",
346
+ " NUM_HEADS=6\n",
347
+ " NUM_LAYERS=6\n",
348
+ " \n",
349
+ " mask = torch.tril(torch.ones(self.context_size, self.context_size)).bool()\n",
350
+ " mask = ~mask\n",
351
+ " self.register_buffer(\"mask\", mask)\n",
352
+ "\n",
353
+ " self.blocks = nn.Sequential(\n",
354
+ " *[Block(embed_dim=embedding_dim, num_heads=NUM_HEADS, mask=mask) for _ in range(NUM_LAYERS)],\n",
355
+ " nn.Dropout(0.2)\n",
356
+ " )\n",
357
+ "\n",
358
+ " # Final feed-forward layer from embeddings\n",
359
+ " self.ffwd = nn.Linear(embedding_dim, out_features=vocab_size)\n",
360
+ "\n",
361
+ " def forward(self, x):\n",
362
+ " tok_embed = self.tok_embed(x)\n",
363
+ " tok_embed = tok_embed.view(-1, self.context_size, self.embedding_dim)\n",
364
+ " pos_embed = self.pos_embed(torch.arange(0, self.context_size, device=\"cuda\")).unsqueeze(0)\n",
365
+ " x = tok_embed + pos_embed\n",
366
+ "\n",
367
+ " # The actual attention is all you need here!\n",
368
+ " # B*C*C cutting out the future\n",
369
+ " x = self.blocks(x)\n",
370
+ "\n",
371
+ " preds = self.ffwd(x)\n",
372
+ " return(preds)\n",
373
+ " \n",
374
+ " def infer(self, x):\n",
375
+ " with torch.no_grad():\n",
376
+ " res = self.forward(x)\n",
377
+ " return(res)\n"
378
+ ]
379
+ },
380
+ {
381
+ "attachments": {},
382
+ "cell_type": "markdown",
383
+ "metadata": {},
384
+ "source": [
385
+ "## Training"
386
+ ]
387
+ },
388
+ {
389
+ "cell_type": "code",
390
+ "execution_count": 19,
391
+ "metadata": {},
392
+ "outputs": [],
393
+ "source": [
394
+ "def compute_loss(model, criterion, x, y):\n",
395
+ " logits = model(x)\n",
396
+ " last_logits = logits[:, -1, :]\n",
397
+ " log_probs = nn.LogSoftmax(dim=1)(last_logits)\n",
398
+ " loss = criterion(log_probs, y.view(-1).long())\n",
399
+ " return loss"
400
+ ]
401
+ },
402
+ {
403
+ "cell_type": "code",
404
+ "execution_count": 47,
405
+ "metadata": {},
406
+ "outputs": [],
407
+ "source": [
408
+ "EMBEDDING_NDIM = 384\n",
409
+ "VOCAB_SIZE = 128\n",
410
+ "BATCH_SIZE=64\n",
411
+ "# \"Context window\"\n",
412
+ "BLOCK_SIZE=256\n",
413
+ "LR=1e-3\n",
414
+ "\n",
415
+ "train_dataset = TextDataset(train_data, BLOCK_SIZE)\n",
416
+ "test_dataset = TextDataset(test_data, BLOCK_SIZE)\n",
417
+ "\n",
418
+ "# Janky training code\n",
419
+ "model = GPT(\n",
420
+ " embedding_dim=EMBEDDING_NDIM, \n",
421
+ " vocab_size=VOCAB_SIZE,\n",
422
+ " context_size=BLOCK_SIZE,\n",
423
+ " lr=LR\n",
424
+ " )\n",
425
+ "\n",
426
+ "model = model.to('cuda')\n",
427
+ "optimizer = optim.AdamW(model.parameters(), lr=LR)\n",
428
+ "# TODO Fix this!\n",
429
+ "scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=10000, gamma=0.2)\n",
430
+ "criterion = nn.NLLLoss()"
431
+ ]
432
+ },
433
+ {
434
+ "cell_type": "code",
435
+ "execution_count": 50,
436
+ "metadata": {},
437
+ "outputs": [
438
+ {
439
+ "name": "stdout",
440
+ "output_type": "stream",
441
+ "text": [
442
+ "Step 0; loss: 3.3686537742614746\n",
443
+ "Step 100; loss: 3.3535483678181968\n",
444
+ "Step 200; loss: 3.3484479188919067\n",
445
+ "Step 300; loss: 3.344235420227051\n",
446
+ "Step 400; loss: 3.338580369949341\n",
447
+ "Step 500; loss: 3.330465725490025\n",
448
+ "Step 600; loss: 3.333183079957962\n",
449
+ "Step 700; loss: 3.3319032986958823\n",
450
+ "Step 800; loss: 3.332624101638794\n",
451
+ "Step 900; loss: 3.3325188810175117\n",
452
+ "Step 1000; loss: 3.331260542074839\n",
453
+ "Step 1100; loss: 3.3311657355381894\n"
454
+ ]
455
+ },
456
+ {
457
+ "ename": "KeyboardInterrupt",
458
+ "evalue": "",
459
+ "output_type": "error",
460
+ "traceback": [
461
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
462
+ "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
463
+ "\u001b[1;32m/home/ritsuko/projects/ai/micrograd/gpt.ipynb Cell 20\u001b[0m in \u001b[0;36m2\n\u001b[1;32m <a href='vscode-notebook-cell:/home/ritsuko/projects/ai/micrograd/gpt.ipynb#X24sZmlsZQ%3D%3D?line=24'>25</a>\u001b[0m \u001b[39m# Backward pass\u001b[39;00m\n\u001b[1;32m <a href='vscode-notebook-cell:/home/ritsuko/projects/ai/micrograd/gpt.ipynb#X24sZmlsZQ%3D%3D?line=25'>26</a>\u001b[0m optimizer\u001b[39m.\u001b[39mzero_grad()\n\u001b[0;32m---> <a href='vscode-notebook-cell:/home/ritsuko/projects/ai/micrograd/gpt.ipynb#X24sZmlsZQ%3D%3D?line=26'>27</a>\u001b[0m loss\u001b[39m.\u001b[39;49mbackward()\n\u001b[1;32m <a href='vscode-notebook-cell:/home/ritsuko/projects/ai/micrograd/gpt.ipynb#X24sZmlsZQ%3D%3D?line=27'>28</a>\u001b[0m optimizer\u001b[39m.\u001b[39mstep()\n\u001b[1;32m <a href='vscode-notebook-cell:/home/ritsuko/projects/ai/micrograd/gpt.ipynb#X24sZmlsZQ%3D%3D?line=28'>29</a>\u001b[0m scheduler\u001b[39m.\u001b[39mstep()\n",
464
+ "File \u001b[0;32m~/.local/lib/python3.10/site-packages/torch/_tensor.py:396\u001b[0m, in \u001b[0;36mTensor.backward\u001b[0;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[1;32m 387\u001b[0m \u001b[39mif\u001b[39;00m has_torch_function_unary(\u001b[39mself\u001b[39m):\n\u001b[1;32m 388\u001b[0m \u001b[39mreturn\u001b[39;00m handle_torch_function(\n\u001b[1;32m 389\u001b[0m Tensor\u001b[39m.\u001b[39mbackward,\n\u001b[1;32m 390\u001b[0m (\u001b[39mself\u001b[39m,),\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 394\u001b[0m create_graph\u001b[39m=\u001b[39mcreate_graph,\n\u001b[1;32m 395\u001b[0m inputs\u001b[39m=\u001b[39minputs)\n\u001b[0;32m--> 396\u001b[0m torch\u001b[39m.\u001b[39;49mautograd\u001b[39m.\u001b[39;49mbackward(\u001b[39mself\u001b[39;49m, gradient, retain_graph, create_graph, inputs\u001b[39m=\u001b[39;49minputs)\n",
465
+ "File \u001b[0;32m~/.local/lib/python3.10/site-packages/torch/autograd/__init__.py:173\u001b[0m, in \u001b[0;36mbackward\u001b[0;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[1;32m 168\u001b[0m retain_graph \u001b[39m=\u001b[39m create_graph\n\u001b[1;32m 170\u001b[0m \u001b[39m# The reason we repeat same the comment below is that\u001b[39;00m\n\u001b[1;32m 171\u001b[0m \u001b[39m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[1;32m 172\u001b[0m \u001b[39m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[0;32m--> 173\u001b[0m Variable\u001b[39m.\u001b[39;49m_execution_engine\u001b[39m.\u001b[39;49mrun_backward( \u001b[39m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[1;32m 174\u001b[0m tensors, grad_tensors_, retain_graph, create_graph, inputs,\n\u001b[1;32m 175\u001b[0m allow_unreachable\u001b[39m=\u001b[39;49m\u001b[39mTrue\u001b[39;49;00m, accumulate_grad\u001b[39m=\u001b[39;49m\u001b[39mTrue\u001b[39;49;00m)\n",
466
+ "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
467
+ ]
468
+ }
469
+ ],
470
+ "source": [
471
+ "from torch.utils.data import DataLoader\n",
472
+ "EPOCHS = 1\n",
473
+ "STEPS = 5000\n",
474
+ "VAL_INTERVAL = 100\n",
475
+ "\n",
476
+ "losses = []\n",
477
+ "model.train()\n",
478
+ "\n",
479
+ "train_dataloader = DataLoader(\n",
480
+ " train_dataset, \n",
481
+ " batch_size=BATCH_SIZE, \n",
482
+ " shuffle=True, \n",
483
+ " num_workers=4\n",
484
+ ")\n",
485
+ "\n",
486
+ "test_dataloader = DataLoader(test_dataset, batch_size=512, num_workers=4, shuffle=True)\n",
487
+ "\n",
488
+ "step = 0\n",
489
+ "for epoch in range(EPOCHS):\n",
490
+ " for data, target in train_dataloader:\n",
491
+ " data = data.to('cuda')\n",
492
+ " target = target.to('cuda')\n",
493
+ "\n",
494
+ " loss = compute_loss(model, criterion, data, target)\n",
495
+ "\n",
496
+ " # Backward pass\n",
497
+ " optimizer.zero_grad()\n",
498
+ " loss.backward()\n",
499
+ " optimizer.step()\n",
500
+ " scheduler.step()\n",
501
+ "\n",
502
+ " losses.append(loss.cpu().detach().numpy())\n",
503
+ "\n",
504
+ " if step % VAL_INTERVAL == 0:\n",
+ " total_loss = 0.0\n",
+ " total_samples = 0\n",
505
+ " with torch.no_grad():\n",
506
+ " model.eval()\n",
507
+ " for x, y in test_dataloader:\n",
508
+ " x = x.to(\"cuda\")\n",
509
+ " y = y.to(\"cuda\")\n",
510
+ "\n",
511
+ " batch_loss = compute_loss(model, criterion, x, y)\n",
512
+ " total_loss += batch_loss.item() * 512\n",
513
+ " total_samples += 512\n",
514
+ " if total_samples > 10:\n",
515
+ " break\n",
516
+ "\n",
517
+ " average_loss = total_loss / total_samples\n",
518
+ " print(f\"Step {step}; loss: {average_loss}\")\n",
519
+ " model.train()\n",
520
+ "\n",
521
+ " step += 1\n",
522
+ " if step >= STEPS:\n",
523
+ " break\n"
524
+ ]
525
+ },
526
+ {
527
+ "cell_type": "code",
528
+ "execution_count": 15,
529
+ "metadata": {},
530
+ "outputs": [],
531
+ "source": [
532
+ "PATH = \"checkpoints/model.pt\""
533
+ ]
534
+ },
535
+ {
536
+ "cell_type": "code",
537
+ "execution_count": 36,
538
+ "metadata": {},
539
+ "outputs": [],
540
+ "source": [
541
+ "\n",
542
+ "# Store\n",
543
+ "torch.save({\n",
544
+ " 'steps': step,\n",
545
+ " 'model_state_dict': model.state_dict(),\n",
546
+ " 'optimizer_state_dict': optimizer.state_dict(),\n",
547
+ "}, PATH)"
548
+ ]
549
+ },
550
+ {
551
+ "cell_type": "code",
552
+ "execution_count": 18,
553
+ "metadata": {},
554
+ "outputs": [],
555
+ "source": [
556
+ "checkpoint = torch.load(PATH)\n",
557
+ "model.load_state_dict(checkpoint['model_state_dict'])\n",
558
+ "optimizer.load_state_dict(checkpoint['optimizer_state_dict'])"
559
+ ]
560
+ },
561
+ {
562
+ "attachments": {},
563
+ "cell_type": "markdown",
564
+ "metadata": {},
565
+ "source": [
566
+ "Now we test for overfitting:"
567
+ ]
568
+ },
569
+ {
570
+ "cell_type": "code",
571
+ "execution_count": 37,
572
+ "metadata": {},
573
+ "outputs": [
574
+ {
575
+ "data": {
576
+ "text/plain": [
577
+ "2399"
578
+ ]
579
+ },
580
+ "execution_count": 37,
581
+ "metadata": {},
582
+ "output_type": "execute_result"
583
+ }
584
+ ],
585
+ "source": [
586
+ "import gc\n",
587
+ "gc.collect()"
588
+ ]
589
+ },
590
+ {
591
+ "cell_type": "code",
592
+ "execution_count": 51,
593
+ "metadata": {},
594
+ "outputs": [
595
+ {
596
+ "name": "stdout",
597
+ "output_type": "stream",
598
+ "text": [
599
+ "3.4188449382781982\n"
600
+ ]
601
+ }
602
+ ],
603
+ "source": [
604
+ "model.eval()\n",
605
+ "total_loss = 0.0\n",
606
+ "total_samples = 0\n",
607
+ "\n",
608
+ "test_dataloader = DataLoader(test_dataset, batch_size=512, num_workers=4)\n",
609
+ "with torch.no_grad():\n",
610
+ " for x, y in test_dataloader:\n",
611
+ " x = x.to(\"cuda\")\n",
612
+ " y = y.to(\"cuda\")\n",
613
+ "\n",
614
+ " batch_loss = compute_loss(model, criterion, x, y)\n",
615
+ " total_loss += batch_loss.item() * x.size(0)\n",
616
+ " total_samples += x.size(0)\n",
617
+ " if total_samples > 100:\n",
618
+ " break\n",
619
+ "\n",
620
+ " average_loss = total_loss / total_samples\n",
621
+ " print(average_loss)"
622
+ ]
623
+ },
624
+ {
625
+ "cell_type": "code",
626
+ "execution_count": null,
627
+ "metadata": {},
628
+ "outputs": [],
629
+ "source": []
630
+ },
631
+ {
632
+ "attachments": {},
633
+ "cell_type": "markdown",
634
+ "metadata": {},
635
+ "source": [
636
+ "Finally, we generate:"
637
+ ]
638
+ },
639
+ {
640
+ "cell_type": "code",
641
+ "execution_count": 52,
642
+ "metadata": {},
643
+ "outputs": [
644
+ {
645
+ "name": "stdout",
646
+ "output_type": "stream",
647
+ "text": [
648
+ ",n aon mr\n",
649
+ "nr\n",
650
+ "egtel s.mangtVk h\n",
651
+ " -hinSfii ol ihIraddeioi akpshaC.n trU d aamooaa eoeEhl:daoUabo'm-fddE auh hpyHs wv'erstiInnmwt hnAuNu ufl\n",
652
+ "I: rl.T l!eool'lIhl:aynet nna:i yaneehtea hdel\n",
653
+ " hse l;imi\n",
654
+ " hgy f iuto eoh gBum.umhemvt\n",
655
+ "a hFo lNsute oaaenh;byeon"
656
+ ]
657
+ }
658
+ ],
659
+ "source": [
660
+ "g_cuda = torch.Generator(device='cuda')\n",
661
+ "\n",
662
+ "contexts = torch.tensor(encode_text(\"God\"), dtype=torch.int32).to('cuda')\n",
663
+ "GEN_LENGTH=256\n",
664
+ "\n",
665
+ "model.eval()\n",
666
+ "for i in range(GEN_LENGTH):\n",
667
+ " transform = nn.LogSoftmax(1)\n",
668
+ " # What happens if GEN_LENGTH > CONTEXT? don't worry about it\n",
669
+ " #x = F.pad(contexts[:, -BLOCK_SIZE:], (0, BLOCK_SIZE - contexts.size(0)), \"constant\", 0)\n",
670
+ " x = contexts[-BLOCK_SIZE:]\n",
671
+ " x = F.pad(x, (0, BLOCK_SIZE - x.size(0)), \"constant\", 0).unsqueeze(0) # B*T\n",
672
+ " preds = model.infer(x)\n",
673
+ " preds = preds.squeeze(0)\n",
674
+ " probs = torch.softmax(preds, dim=-1)\n",
675
+ "\n",
676
+ " # TODO: Broken because of bug with the trailing 0s. FIX THIS\n",
677
+ " next_char = torch.multinomial(torch.exp(preds[(-1 if i >= BLOCK_SIZE else i), :]), num_samples=1, generator=g_cuda)\n",
678
+ " #context = torch.cat(context, next_char)\n",
679
+ " contexts = torch.cat((contexts, next_char), dim=0)\n",
680
+ " print(decode_text(next_char.cpu().numpy())[-1], end=\"\")\n",
681
+ "\n",
682
+ "#print(\"\".join(decode_text(contexts.cpu().numpy())))"
683
+ ]
684
+ }
685
+ ],
686
+ "metadata": {
687
+ "kernelspec": {
688
+ "display_name": "Python 3",
689
+ "language": "python",
690
+ "name": "python3"
691
+ },
692
+ "language_info": {
693
+ "codemirror_mode": {
694
+ "name": "ipython",
695
+ "version": 3
696
+ },
697
+ "file_extension": ".py",
698
+ "mimetype": "text/x-python",
699
+ "name": "python",
700
+ "nbconvert_exporter": "python",
701
+ "pygments_lexer": "ipython3",
702
+ "version": "3.10.10"
703
+ },
704
+ "orig_nbformat": 4
705
+ },
706
+ "nbformat": 4,
707
+ "nbformat_minor": 2
708
+ }
makemore_bigram.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
makemore_mlp.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
makemore_mlp2.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
makemore_wavenet.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
micrograd.ipynb ADDED
@@ -0,0 +1,284 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 2,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "import math\n",
10
+ "import numpy as np\n",
11
+ "import matplotlib.pyplot as plt\n",
12
+ "%matplotlib inline"
13
+ ]
14
+ },
15
+ {
16
+ "attachments": {},
17
+ "cell_type": "markdown",
18
+ "metadata": {},
19
+ "source": [
20
+ "# What are derivatives?"
21
+ ]
22
+ },
23
+ {
24
+ "cell_type": "code",
25
+ "execution_count": 3,
26
+ "metadata": {},
27
+ "outputs": [],
28
+ "source": [
29
+ "def f(x):\n",
30
+ " \"\"\"Random quadratic\"\"\"\n",
31
+ " return 3*x ** 2 - 4*x + 5"
32
+ ]
33
+ },
34
+ {
35
+ "cell_type": "code",
36
+ "execution_count": 4,
37
+ "metadata": {},
38
+ "outputs": [
39
+ {
40
+ "data": {
41
+ "text/plain": [
42
+ "20"
43
+ ]
44
+ },
45
+ "execution_count": 4,
46
+ "metadata": {},
47
+ "output_type": "execute_result"
48
+ }
49
+ ],
50
+ "source": [
51
+ "f(3)"
52
+ ]
53
+ },
54
+ {
55
+ "cell_type": "code",
56
+ "execution_count": 6,
57
+ "metadata": {},
58
+ "outputs": [
59
+ {
60
+ "data": {
61
+ "text/plain": [
62
+ "[<matplotlib.lines.Line2D at 0x7fac624994b0>]"
63
+ ]
64
+ },
65
+ "execution_count": 6,
66
+ "metadata": {},
67
+ "output_type": "execute_result"
68
+ },
69
+ {
70
+ "data": {
71
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiwAAAGdCAYAAAAxCSikAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/P9b71AAAACXBIWXMAAA9hAAAPYQGoP6dpAABUsklEQVR4nO3de1xUdeI+8GdmgOE6g8hdLooXEAUvqDimZkreTdPcLNMubmaLbaW5xm6r1e6GWb/ureZuaaVmWWppXtZMMQVRURTxjiggDCDIDLeZYWbO7w9wvqF44Xpmhuf9es2rmHNmeI4HnMdzPudzJIIgCCAiIiKyYlKxAxARERHdDQsLERERWT0WFiIiIrJ6LCxERERk9VhYiIiIyOqxsBAREZHVY2EhIiIiq8fCQkRERFbPQewATWE2m5Gfnw8PDw9IJBKx4xAREdE9EAQB5eXlCAwMhFTauGMmNllY8vPzERwcLHYMIiIiaoLc3FwEBQU16jU2WVg8PDwA1G6wQqEQOQ0RERHdC61Wi+DgYMvneGPYZGG5cRpIoVCwsBAREdmYpgzn4KBbIiIisnosLERERGT1WFiIiIjI6rGwEBERkdVrVGFZsWIFoqOjLYNdVSoVduzYYVk+YsQISCSSeo958+bVe4+cnBxMmDABrq6u8PX1xaJFi2A0Gltma4iIiMguNeoqoaCgICxbtgzdu3eHIAj48ssvMXnyZBw/fhy9evUCADz77LN48803La9xdXW1/L/JZMKECRPg7++P5ORkFBQUYPbs2XB0dMRbb73VQptERERE9kYiCILQnDfw8vLCO++8gzlz5mDEiBHo27cvPvjggwbX3bFjByZOnIj8/Hz4+fkBAFauXInFixejuLgYTk5O9/Q9tVotlEolNBoNL2smIiKyEc35/G7yGBaTyYQNGzagsrISKpXK8vy6devg7e2N3r17IyEhAVVVVZZlKSkpiIqKspQVABgzZgy0Wi0yMzObGoWIiIjsXKMnjsvIyIBKpYJOp4O7uzs2b96MyMhIAMDjjz+O0NBQBAYG4uTJk1i8eDHOnTuHTZs2AQDUanW9sgLA8rVarb7t99Tr9dDr9ZavtVptY2MTERGRDWt0YQkPD0d6ejo0Gg2+//57PPnkk0hKSkJkZCTmzp1rWS8qKgoBAQEYNWoUsrKy0LVr1yaHTExMxBtvvNHk1xMREZFta/QpIScnJ3Tr1g0xMTFITExEnz598OGHHza4bmxsLADg4sWLAAB/f38UFhbWW+fG1/7+/rf9ngkJCdBoNJZHbm5uY2MTERGRDWv2PCxms7ne6ZrfS09PBwAEBAQAAFQqFTIyMlBUVGRZZ/fu3VAoFJbTSg2Ry+WWS6l5/yAiIqL2p1GnhBISEjBu3DiEhISgvLwc69evx759+7Br1y5kZWVh/fr1GD9+PDp27IiTJ0/i5ZdfxvDhwxEdHQ0AGD16NCIjIzFr1iwsX74carUar732GuLj4yGXy1tlAxvjdL4W6w9fwcDOXpjct5PYcYiIiKhOowpLUVERZs+ejYKCAiiVSkRHR2PXrl148MEHkZubi19++QUffPABKisrERwcjGnTpuG1116zvF4mk2Hbtm14/vnnoVKp4ObmhieffLLevC1iSjpfjLWHcnBOXc7CQkREZEWaPQ+LGFprHpZCrQ6qxD0wC8C+V0ags7dbi703ERFReyfKPCz2yE/hjOE9fAAAPxzLEzkNERER3cDCcpNHYoIAAD+k5cFstrmDT0RERHaJheUmcT39oHB2QL5Gh+SsErHjEBEREVhYbuHsKMNDfQMBAN+ncb4XIiIia8DC0oBHYoIBADsz1dDqakROQ0RERCwsDegTpER3X3foaszYfrJA7DhERETtHgtLAyQSiWXw7fdpvFqIiIhIbCwst/Fwv06QSoCjV67jUnGF2HGIiIjaNRaW2/BVOON+zslCRERkFVhY7uDG4NtNx67CxDlZiIiIRMPC
cgejevpC6eKIAo0OyVnXxI5DRETUbrGw3IGzowyTLXOy8LQQERGRWFhY7uLG1UI7T6mhqeacLERERGJgYbmLqE5K9PBzh95oxs+ck4WIiEgULCx3UX9OFk7VT0REJAYWlnswpW8nyKQSHMspQxbnZCEiImpzLCz3oN6cLBx8S0RE1OZYWO7R9LrTQpyThYiIqO2xsNyjkT194enqCLVWhwMXOScLERFRW2JhuUdyBxkm9+GcLERERGJgYWmEG1P178rknCxERERtiYWlEXp3UiDczwMGoxnbTuaLHYeIiKjdYGFpBIlEgukDagffbjzK00JERERthYWlkSbXzcmSnluGi0XlYschIiJqF1hYGsnHQ44HwmvnZPk+7arIaYiIiNoHFpYmuDFV/+bjeZyThYiIqA2wsDTByAg/dHB1RKFWj98uFIsdh4iIyO6xsDSBk4MUk/t2AgBs5JwsRERErY6FpYlunBbanVmIsiqDyGmIiIjsGwtLE/UKVCDC3wMGkxlbjnPwLRERUWtiYWkiiUSCGQNrZ77dcCQXgsDBt0RERK2FhaUZHu4XBLmDFGfV5UjPLRM7DhERkd1iYWkGpasjxkcFAAA2HM4VOQ0REZH9YmFpphunhbaezEeF3ihyGiIiIvvEwtJMg7p4IczHDVUGE35K5w0RiYiIWgMLSzPVH3ybI3IaIiIi+8TC0gKm9Q+Co0yCk3kaZOZrxI5DRERkd1hYWkBHdzlGR/oD4OBbIiKi1sDC0kJmDKo9LbQl/SqqDSaR0xAREdmXRhWWFStWIDo6GgqFAgqFAiqVCjt27LAs1+l0iI+PR8eOHeHu7o5p06ahsLCw3nvk5ORgwoQJcHV1ha+vLxYtWgSj0favrrmvqzeCvVxQrjPi54wCseMQERHZlUYVlqCgICxbtgxpaWk4evQoRo4cicmTJyMzMxMA8PLLL2Pr1q3YuHEjkpKSkJ+fj6lTp1pebzKZMGHCBBgMBiQnJ+PLL7/EmjVrsGTJkpbdKhFIpRI8OqBu8O1hDr4lIiJqSRKhmXPKe3l54Z133sEjjzwCHx8frF+/Ho888ggA4OzZs+jZsydSUlIwePBg7NixAxMnTkR+fj78/PwAACtXrsTixYtRXFwMJyene/qeWq0WSqUSGo0GCoWiOfFbVKFWhyHLfoXJLGD3y8PR3c9D7EhERERWozmf300ew2IymbBhwwZUVlZCpVIhLS0NNTU1iIuLs6wTERGBkJAQpKSkAABSUlIQFRVlKSsAMGbMGGi1WstRmobo9Xpotdp6D2vkp3DGyAhfALX3FyIiIqKW0ejCkpGRAXd3d8jlcsybNw+bN29GZGQk1Go1nJyc4OnpWW99Pz8/qNVqAIBara5XVm4sv7HsdhITE6FUKi2P4ODgxsZuM4/VDb7ddCwPeiMH3xIREbWERheW8PBwpKenIzU1Fc8//zyefPJJnD59ujWyWSQkJECj0VgeubnWe/Ti/h6+CFA643pVDXZlFt79BURERHRXjS4sTk5O6NatG2JiYpCYmIg+ffrgww8/hL+/PwwGA8rKyuqtX1hYCH//2jlK/P39b7lq6MbXN9ZpiFwut1yZdONhrWRSCaZz8C0REVGLavY8LGazGXq9HjExMXB0dMSePXssy86dO4ecnByoVCoAgEqlQkZGBoqKiizr7N69GwqFApGRkc2NYjX+MCAIEgmQnFWCKyWVYschIiKyeQ6NWTkhIQHjxo1DSEgIysvLsX79euzbtw+7du2CUqnEnDlzsGDBAnh5eUGhUOCFF16ASqXC4MGDAQCjR49GZGQkZs2aheXLl0OtVuO1115DfHw85HJ5q2ygGII6uGJ4dx8knS/GhiO5WDw2QuxIRERENq1RR1iKioowe/ZshIeHY9SoUThy5Ah27dqFBx98EADw/vvvY+LEiZg2bRqGDx8Of39/bNq0yfJ6mUyGbdu2QSaTQaVS4YknnsDs2bPx5ptvtuxWWYEbg283Hs1Djcks
choiIiLb1ux5WMRgrfOw/F6NyQxV4q+4VqHHyidiMLb37cfoEBERtQeizMNCd+Yok+KRmCAAwIYjHHxLRETUHCwsrWjGwNrTQknni3G1rFrkNERERLaLhaUVdfZ2gyqsIwQB+I4z3xIRETUZC0srm2EZfJsLk9nmhgsRERFZBRaWVjamlz88XR2Rr9Fh//liseMQERHZJBaWVubsKMPUfrWDb7/hzLdERERNwsLSBm7MybLnbBGKtDqR0xAREdkeFpY20N3PAwNCO8BkFrAxLU/sOERERDaHhaWNzBgUAqD2tBAH3xIRETUOC0sbmRAVAKWLI/KuVyPpfNHdX0BEREQWLCxtxMVJhul1M99+nXJF5DRERES2hYWlDT0xOBQAsO98MXJKqkROQ0REZDtYWNpQZ283DO/hA0EA1qXyKAsREdG9YmFpY7PrjrJ8ezQXuhqTyGmIiIhsAwtLG3sgwhedPF1QVlWDbScLxI5DRERkE1hY2phMKsHjsbWXOH99iKeFiIiI7gULiwgeHRgMJ5kUJ3LLcDKvTOw4REREVo+FRQTe7nKMj/IHwEuciYiI7gULi0hmqToDAH46kY+yKoO4YYiIiKwcC4tI+od4IjJAAb3RjI1HeX8hIiKiO2FhEYlEIsEsVe0lzmtTr8DM+wsRERHdFguLiCb3DYSHswOulFRh/4ViseMQERFZLRYWEbk6OeCRuvsLreUlzkRERLfFwiKyG/cX2nO2CLmlvL8QERFRQ1hYRNbVxx1Du3lDEID1h3PEjkNERGSVWFiswI3Bt98eyYXeyPsLERER3YyFxQqMivBFgNIZpZUGbM/g/YWIiIhuxsJiBRxkUjw+qO7+Qpz5loiI6BYsLFbi0UHBcJRJcCynDKeuasSOQ0REZFVYWKyEr4czxvYOAMBLnImIiG7GwmJFZtcNvt2SfhWa6hqR0xAREVkPFhYrMiC0AyL8PaCrMeP7NN5fiIiI6AYWFisikUgsE8mtPcT7CxEREd3AwmJlHu7XCe5yB2Rfq0RyVonYcYiIiKwCC4uVcZM7YFr/TgCAr1IuixuGiIjISrCwWKEbM9/+cqYQV8uqRU5DREQkPhYWK9TN1wOqsI4wC7zEmYiICGBhsVpP39cZALA+NQfVBt5fiIiI2rdGFZbExEQMHDgQHh4e8PX1xZQpU3Du3Ll664wYMQISiaTeY968efXWycnJwYQJE+Dq6gpfX18sWrQIRqOx+VtjR0b19EOIlys01TX44RgvcSYiovatUYUlKSkJ8fHxOHToEHbv3o2amhqMHj0alZWV9dZ79tlnUVBQYHksX77cssxkMmHChAkwGAxITk7Gl19+iTVr1mDJkiUts0V2QiaVWI6yrD6YzUuciYioXZMIgtDkT8Li4mL4+voiKSkJw4cPB1B7hKVv37744IMPGnzNjh07MHHiROTn58PPzw8AsHLlSixevBjFxcVwcnK66/fVarVQKpXQaDRQKBRNjW/1KvRGqN7ag3K9EaufHogHwn3FjkRERNRkzfn8btYYFo2m9iZ9Xl5e9Z5ft24dvL290bt3byQkJKCqqsqyLCUlBVFRUZayAgBjxoyBVqtFZmZmg99Hr9dDq9XWe7QH7nIH/GFgMADgiwPZIqchIiIST5MLi9lsxksvvYT77rsPvXv3tjz/+OOPY+3atdi7dy8SEhLw9ddf44knnrAsV6vV9coKAMvXarW6we+VmJgIpVJpeQQHBzc1ts15akhnSCXAbxeu4XxhudhxiIiIROHQ1BfGx8fj1KlTOHDgQL3n586da/n/qKgoBAQEYNSoUcjKykLXrl2b9L0SEhKwYMECy9darbbdlJZgL1eMjvTHzkw1Vh/MRuLUaLEjERERtbkmHWGZP38+tm3bhr179yIoKOiO68bGxgIALl68CADw9/dHYWFhvXVufO3v79/ge8jlcigUinqP9mTOsC4AgE3HrqK00iByGiIiorbXqMIiCALmz5+PzZs349dff0WX
Ll3u+pr09HQAQEBAAABApVIhIyMDRUVFlnV2794NhUKByMjIxsRpNwaEdkBUJyX0RjO+OZwjdhwiIqI216jCEh8fj7Vr12L9+vXw8PCAWq2GWq1GdXXt9PFZWVn4xz/+gbS0NFy+fBk//fQTZs+ejeHDhyM6uvZUxujRoxEZGYlZs2bhxIkT2LVrF1577TXEx8dDLpe3/BbaAYlEgmeGdgYAfJl8GQajWdxAREREbaxRhWXFihXQaDQYMWIEAgICLI9vv/0WAODk5IRffvkFo0ePRkREBBYuXIhp06Zh69atlveQyWTYtm0bZDIZVCoVnnjiCcyePRtvvvlmy26ZnZkQFQhfDzmKyvXYnlEgdhwiIqI21ax5WMTSXuZhudknv17Au/87j6hOSvw0/z5IJBKxIxEREd0z0eZhobb1eGwo5A5SZFzV4OiV62LHISIiajMsLDbEy80JU/t3AsCJ5IiIqH1hYbExT99Xe2XWrkw1ckur7rI2ERGRfWBhsTE9/DwwrLs3zELtFUNEREQtyWgyo0BTLXaMW7Cw2KBnhtYeZfn2SC4q9EaR0xARkT3ZlVmIYW/vxes/NXx/P7GwsNig+7v7IMzHDeV6IzYezRU7DhER2QlBELBqfxaMZgEKF0ex49TDwmKDpFIJnqkby7Im+TJMZpu7Mp2IiKxQanYpTuRpIHeQYrYqVOw49bCw2Kip/TtB6eKIKyVV2HOm8O4vICIiuotV+y8BAKbFBMHb3bpmn2dhsVGuTg54PDYEAPDFQV7iTEREzXOhsBy/ni2CRAI8OyxM7Di3YGGxYbNVoZBJJTh0qRSZ+Rqx4xARkQ37z2+1R1dGR/qhi7ebyGluxcJiwwKULhgfVXsX7C8OXBY3DBER2awirQ5bjucDAOYOt76jKwALi82bU3eJ89YT+Sgq14mchoiIbNHq5MswmMyICe2AmFAvseM0iIXFxvUN9kT/EE8YTGasPZQjdhwiIrIxFXoj1h26AsB6j64ALCx2Yc7Q2h+wtYeuQFdjEjkNERHZkm+P5EKrM6KLtxse7OkndpzbYmGxA2N6+SGogwtKKw2cSI6IiO5ZjclsuZnuH4d1gVQqETnR7bGw2AEHmdRyCdqq3y7BaDKLnIiIiGzB9owCXC2rRkc3J0zrHyR2nDtiYbETfxgQDC83J+SWVmP7KbXYcYiIyMrVTsNfeynzk0M6w9lRJnKiO2NhsRMuTjI8qeoMAFi5LwuCwOn6iYjo9pKzSpCZr4WzoxSzBlvXNPwNYWGxI7NVoXBxlOF0gRa/XbgmdhwiIrJin9UdXfnDgGB0cHMSOc3dsbDYkQ5uTpgxKBgAsDIpS+Q0RERkrc4UaLH/fDGkEuCPQ633UubfY2GxM38cFgYHqQTJWSU4mVcmdhwiIrJCN6bhH9c7ACEdXUVOc29YWOxMJ08XPNQnEACPshAR0a0KNNX4Kd26p+FvCAuLHXru/q4AgB2n1Mi+VilyGiIisiarD16G0SxgUBcv9An2FDvOPWNhsUPh/h4YGeELQYDlkjUiIiKtrgbrU2tv4/KcDR1dAVhY7NbzI2qPsvxwLI83RSQiIgDAhsM5qNAb0c3XHQ+E+4odp1FYWOzUwM5eiAntAIPRjNUHL4sdh4iIRGYwmvHFgcsAgLnDwqx6Gv6GsLDYsXl1Y1nWHrqCcl2NyGmIiEhMW0/kQ63VwcdDjsn9AsWO02gsLHZsVIQvuvu6o1xntJyzJCKi9kcQBMulzE8N6Qy5g3VPw98QFhY7JpVKLJesfX4gG3qjSeREREQkhv0XruGsuhyuTjI8EWv90/A3hIXFzk3u2wkBSmcUleux+dhVseMQEZEIVu2vnZdrxsAQKF0dRU7TNCwsds7JQYo5Q7sAqL3E2WTmTRGJiNqTE7llOHixBDKpBM8M7Sx2nCZjYWkHZgwKgcLZAZeuVWL3abXYcYiIqA19svciAGBy30AEdbCNafgbwsLSDrjLHTBb1RkAsCLpEgSBR1mI
iNqDMwVa7D5dCIkE+NOIbmLHaRYWlnbiqfs6Q+4gxYncMhy6VCp2HCIiagOf1h1dGR8VgG6+7iKnaR4WlnbC212O6QOCAPCmiERE7UFWcQV+zigAAMx/wLaPrgAsLO3K3GFdIZUASeeLcTpfK3YcIiJqRf/emwVBAOJ6+qFngELsOM3GwtKOhHR0xfioAADAZ/t5lIWIyF7lllZhS3rtVBbzR9r+0RWAhaXduTFd/7aTBcgtrRI5DRERtYYVSVkwmQUM6+6NvsGeYsdpEY0qLImJiRg4cCA8PDzg6+uLKVOm4Ny5c/XW0el0iI+PR8eOHeHu7o5p06ahsLCw3jo5OTmYMGECXF1d4evri0WLFsFoNDZ/a+iuendSYlh3b5jMAo+yEBHZoQJNNb4/mgcAeGFkd5HTtJxGFZakpCTEx8fj0KFD2L17N2pqajB69GhUVlZa1nn55ZexdetWbNy4EUlJScjPz8fUqVMty00mEyZMmACDwYDk5GR8+eWXWLNmDZYsWdJyW0V3FF83+Oq7I3ko0FSLnIaIiFrSqv2XYDCZMaiLFwZ18RI7TouRCM2YlKO4uBi+vr5ISkrC8OHDodFo4OPjg/Xr1+ORRx4BAJw9exY9e/ZESkoKBg8ejB07dmDixInIz8+Hn58fAGDlypVYvHgxiouL4eTkdNfvq9VqoVQqodFooFDY/kAiMTz6WQpSs0vxpCoUb0zuLXYcIiJqAdcq9Bj69q/Q1Zjx9ZxBGNbdR+xI9TTn87tZY1g0Gg0AwMurtsGlpaWhpqYGcXFxlnUiIiIQEhKClJQUAEBKSgqioqIsZQUAxowZA61Wi8zMzAa/j16vh1arrfeg5nlxVO1hwm+O5KJQqxM5DRERtYTPD2RDV2NGn2BPDO3mLXacFtXkwmI2m/HSSy/hvvvuQ+/etf9CV6vVcHJygqenZ711/fz8oFarLev8vqzcWH5jWUMSExOhVCotj+Dg4KbGpjqqrh0xsHMHGIxmzstCRGQHyqoM+Cr5MgDghQe6QSKRiBuohTW5sMTHx+PUqVPYsGFDS+ZpUEJCAjQajeWRm5vb6t/T3kkkEvy57ijL+tQcFPEoCxGRTVuTfBmVBhN6Bigwqqev2HFaXJMKy/z587Ft2zbs3bsXQUFBluf9/f1hMBhQVlZWb/3CwkL4+/tb1rn5qqEbX99Y52ZyuRwKhaLeg5pvaDdv9A/xhN5oxqr9l8SOQ0RETVSuq8Hqg5cB1M5qa29HV4BGFhZBEDB//nxs3rwZv/76K7p06VJveUxMDBwdHbFnzx7Lc+fOnUNOTg5UKhUAQKVSISMjA0VFRZZ1du/eDYVCgcjIyOZsCzXS74+yrE29guJyvciJiIioKdYeyoGmugZdfdwwtnfD//i3dY0qLPHx8Vi7di3Wr18PDw8PqNVqqNVqVFfXXhqrVCoxZ84cLFiwAHv37kVaWhqefvppqFQqDB48GAAwevRoREZGYtasWThx4gR27dqF1157DfHx8ZDL5S2/hXRH9/fwQZ9gT+hqzPjvbzzKQkRka6oNJsvf3/EPdINMan9HV4BGFpYVK1ZAo9FgxIgRCAgIsDy+/fZbyzrvv/8+Jk6ciGnTpmH48OHw9/fHpk2bLMtlMhm2bdsGmUwGlUqFJ554ArNnz8abb77ZcltF90wikeCluqMsX6VcQUkFj7IQEdmSbw7noKTSgGAvFzzUJ1DsOK2mWfOwiIXzsLQsQRAw+dODOJmnwbz7u+LVcRFiRyIionugN5owfPleFGr1SJwahccGhYgd6Y5Em4eF7INEIsGfR944ynIZpZUGkRMREdG9+D4tD4VaPQKUzpjav5PYcVoVCwsBAEb19EWvQAWqDCZ8foBjWYiIrF2NyYwV+2rn0XpueBjkDjKRE7UuFhYCUP+KoS+Tr6CsikdZiIis2Y/p+ci7Xg1vdyfMsPJTQS2BhYUsHuzphwh/D1TojfjiQLbYcYiI6DZMZgH/3nsRAPDssDA4O9r3
0RWAhYV+RyqVWO4xtPrgZWiqakROREREDdmeUYBL1yrh6eqImYNDxY7TJlhYqJ4xvfwR7ueBcr0Rq5N5lIWIyNqYzAI+3HMBAPD0kC5wlzuInKhtsLBQPVKpBC+M6gYA+OJANrQ6HmUhIrImP524iotFFVC6OOLpoZ3FjtNmWFjoFuN7B6C7rzu0OiO+rLs3BRERia/GZMYHv9QeXZl3f1conB1FTtR2WFjoFlKpBPNH1h5l+e+BbJTzKAsRkVX4Pi0PV0qq4O3uhCeHtI+xKzewsFCDJkYHIszHDZrqGnyVckXsOERE7Z6uxoSP6sau/GlEN7g6tY+xKzewsFCDZFIJXrhxlOW3S6jUG0VORETUvm04nIMCjQ7+Cmc8Hmv/867cjIWFbmtSdCC6eLvhehWPshARianaYMIne2tntX1hVLd2Me/KzVhY6LYcZFLMf6D2KMt/fruECh5lISISxVcpl3GtQo9gLxdMjwkWO44oWFjojib3DUTnjq4orTRw9lsiIhGU62qwMqn26MqLo3rAyaF9fnS3z62me+Ygk2LB6HAAwH/2X8J13smZiKhNrT54GderahDm44YpfQPFjiMaFha6q4lRAYgMUKBcb8SKupZPREStr6zKgP/svwQAWPBgDzjI2u/HdvvdcrpnUqkEi8bUHmX5Mvky1BqdyImIiNqH//x2CeV6IyL8PTC+d4DYcUTFwkL3ZES4DwZ27gC90Wy5hwUREbWeaxV6rK6bbXzh6HBIpRJxA4mMhYXuiUQiwV/GRgAAvjuai+xrlSInIiKybyv2ZaHKYEKfICXievqKHUd0LCx0zwZ29sID4T4wmQW8t/u82HGIiOyWWqPD14dq579aODocEkn7ProCsLBQI71SN5Zl64l8ZOZrRE5DRGSfPtl7AQajGYM6e2FYd2+x41gFFhZqlF6BSkzqU3tZ3bu7zomchojI/uSWVuHbI7kAgIWje/DoSh0WFmq0BQ/2gEwqwd5zxTicXSp2HCIiu/LRnguoMQkY1t0bsWEdxY5jNVhYqNG6eLvh0YG1U0Mv33kWgiCInIiIyD5cKq7AD8fyANSOXaH/w8JCTfLnkd0hd5Di6JXr2HuuSOw4RER24YNfLsAsAHE9/dA32FPsOFaFhYWaxF/pjKeGdAYAvLPrPMxmHmUhImqOs2ottp7MB1B76p3qY2GhJpt3f1d4yB1wpuD/fsmIiKhp3vvfeQgCMCE6AJGBCrHjWB0WFmqyDm5OmDs8DADw3u7zqDGZRU5ERGSbjudcx/9OF0IqAV6O6y52HKvEwkLN8szQLvB2d8KVkip8dzRX7DhERDZHEAS8tf0MAGBa/yB08/UQOZF1YmGhZnGTOyD+gW4Aai/F09WYRE5ERGRb/ne6EEcuX4ezo5RXBt0BCws12+OxIejk6YJCrR5fJl8WOw4Rkc2oMZmxbMdZAMCzw8Lgr3QWOZH1YmGhZpM7yPBS3TnXFUlZ0OpqRE5ERGQbvjmcg+xrlfB2d8Jz93cVO45VY2GhFjG1fxC6+bqjrKoG/9l/Sew4RERWT6urwQe/XAAAvBTXA+5yB5ETWTcWFmoRMqkEr4yunTfg8wPZKC7Xi5yIiMi6rdyXhdJKA7r6uGFG3ezhdHssLNRixvTyR58gJaoMJnz86wWx4xARWa38smp8fiAbAJAwriccZPw4vhv+CVGLkUgkWDwuAgCwLjUHF4vKRU5ERGSd3v3fOeiNZsR28cKonr5ix7EJLCzUooZ09UZcTz+YzAL+9fMZseMQEVmdU1c12Hz8KgDgbxN6QiKRiJzINrCwUIv76/gIOEgl2HuuGPvPF4sdh4jIatyYJE4QgMl9AxEd5Cl2JJvR6MKyf/9+TJo0CYGBgZBIJNiyZUu95U899RQkEkm9x9ixY+utU1paipkzZ0KhUMDT0xNz5sxBRUVFszaErEeYjztmqzoDAP7582kYOWU/EREAYN+5YiRnlcDJQYpXOElcozS6sFRWVqJPnz74
9NNPb7vO2LFjUVBQYHl888039ZbPnDkTmZmZ2L17N7Zt24b9+/dj7ty5jU9PVuvFUd3h6eqI84UV+JZT9hMRwWgyW6bgf3pIZwR7uYqcyLY0+qLvcePGYdy4cXdcRy6Xw9/fv8FlZ86cwc6dO3HkyBEMGDAAAPDxxx9j/PjxePfddxEYGNjYSGSFlK6OeGlUd7y+9TTe+995TOoTCIWzo9ixiIhEszEtDxeKKuDp6og/1d3ShO5dq4xh2bdvH3x9fREeHo7nn38eJSUllmUpKSnw9PS0lBUAiIuLg1QqRWpqaoPvp9frodVq6z3I+s0cHIowHzeUVBrw6d6LYschIhJNpd6I93afBwD8eWR3KF34D7jGavHCMnbsWHz11VfYs2cP3n77bSQlJWHcuHEwmWpviqdWq+HrW/8SLgcHB3h5eUGtVjf4nomJiVAqlZZHcDAn2LEFjjIp/ja+JwBg9YHLyCmpEjkREZE4Vu2/hOJyPUI7uuKJwaFix7FJLV5YZsyYgYceeghRUVGYMmUKtm3bhiNHjmDfvn1Nfs+EhARoNBrLIzeXYyJsxcgIXwzt5g2DyYxlO3mZMxG1P0VaHVbV3bJk8dgIODnwAt2maPU/tbCwMHh7e+PixdpTAv7+/igqKqq3jtFoRGlp6W3HvcjlcigUinoPsg0SiQSvTewJqQTYnqHG4exSsSMREbWp93afR3WNCf1DPDGud8Ofc3R3rV5Y8vLyUFJSgoCAAACASqVCWVkZ0tLSLOv8+uuvMJvNiI2Nbe04JIIIfwUeHRgCAPjHttMwmwWRExERtY1z6nJ8V3elJCeJa55GF5aKigqkp6cjPT0dAJCdnY309HTk5OSgoqICixYtwqFDh3D58mXs2bMHkydPRrdu3TBmzBgAQM+ePTF27Fg8++yzOHz4MA4ePIj58+djxowZvELIji14sPZOpBm/m+GRiMjeJe44A7MAjI/yR0yol9hxbFqjC8vRo0fRr18/9OvXDwCwYMEC9OvXD0uWLIFMJsPJkyfx0EMPoUePHpgzZw5iYmLw22+/QS6XW95j3bp1iIiIwKhRozB+/HgMHToUq1atarmtIqvj4yFHfN1lfMt3nUWVwShyIiKi1nXw4jXsO1cMR5kEfxkTIXYcmycRBMHmjs9rtVoolUpoNBqOZ7EhuhoT4t5LQt71arwU1x0vxfUQOxIRUaswmsyY+PEBnFWX46khnfH6Q73EjmQVmvP5zaHK1GacHWVIGFd7mfNnSZeg1uhETkRE1DrWHrqCs+pyeLo64sVR3cWOYxdYWKhNjY/yx4DQDqiuMWH5rrNixyEianHXKvT4f3WTxC0aE44Obk4iJ7IPLCzUpiQSCf4+MRIAsOnYVZzMKxM3EBFRC1u+8yzKdUb07qTAjLorJKn5WFiozfUJ9sTD/ToBqL3M2QaHURERNeh4znV8dzQPAPDGQ70hk/Iy5pbCwkKi+MvYcDg7SnHk8nXsONXwLRmIiGyJySxgyY+ZAIDpMUGICe0gciL7wsJCoghQumDu8K4Aaucp0NWYRE5ERNQ83x7JRcZVDTycHfCXsbyMuaWxsJBo5t0fBj+FHLml1fgs6ZLYcYiImux6pcFyIcGCB3vAx0N+l1dQY7GwkGhcnRzw2oTaAbif7ruIKyWVIiciImqa/7f7HMqqahDu54FZvBtzq2BhIVFNjA6ovZuz0YwlP2ZyAC4R2ZxTVzVYl5oDAHhzci84yPjR2hr4p0qikkgkeHNyLzjJpEg6X4xdmRyAS0S2w2wWsOTHUxAEYHLfQMSGdRQ7kt1iYSHRhfm4Y979YQCAN7aeRqWe9xkiItuw6fhVHMspg5uTDH8d31PsOHaNhYWswp8e6IZgLxcUaHT4cM8FseMQEd2VVleDZTvOAAD+PKo7/BTOIieybywsZBWcHWV486HeAIDPD2TjnLpc5ERERHf2we4LuFZhQJiPG56+r4vYceweCwtZjQcifDGmlx9MZgGvbcngAFwi
slpn1Vp8mXIZAPD6pF5wcuDHaWvjnzBZlSWTesHFUYYjl6/jh2NXxY5DRHQLQRCw9MdMmMwCxvbyx/AePmJHahdYWMiqdPJ0wYtxtbdiT9x+BmVVBpETERHVt/VkAVKzS+HsKMVrEznQtq2wsJDVmTO0C7r7uqOk0oB3dp0TOw4RkUWl3oh//XwaABA/ohuCOriKnKj9YGEhq+Mok+KfU2oH4K4/nIP03DJxAxER1fno1wso1OoR4uWKZ4eHiR2nXWFhIasUG9YRU/t3giAAf9ucAZOZA3CJSFwXi8rxxYFsAMDSSZFwdpSJnKh9YWEhq5UwricUzg7IzNdi7aErYschonbMbBbw6g8ZqDEJGBnhi1E9/cSO1O6wsJDV8vGQY1HdLdrf3XUOReU6kRMRUXu1LvUKjl65DjcnGf5Rd8qa2hYLC1m1xweFoE+QEuV6I976+YzYcYioHcovq8ayHWcBAH8ZG4FOni4iJ2qfWFjIqsmkEvxzShQkEmBLej6Ss66JHYmI2hFBEPDallOoNJjQP8QTswaHih2p3WJhIasXFaS0/CXx9y2nYDCaRU5ERO3F1pMF+PVsEZxkUrw9LRpSqUTsSO0WCwvZhIWjw+Ht7oSs4kr857dLYschonbgeqUBb/yUCQCIf6Abuvt5iJyofWNhIZugdHHE3ybUzij54Z4LuFhUIXIiIrJ3/9h2GiWVBvTwc8fzI7qKHafdY2EhmzGlbyeMCPeBwWjGou9PcG4WImo1SeeLsen4VUgkwLJp0by5oRXgHiCbIZFIkDg1Ch5yBxzPKbNM4ERE1JIq9Ub8dVMGAOCpIZ3RP6SDyIkIYGEhGxOgdLGcGnr3f+dwqZinhoioZb37v3O4WlaNTp4ueGV0uNhxqA4LC9mcRwcGY1h3b+iNZvzl+5M8NURELeZYznWsSb4MAHhrahTc5A7iBiILFhayOTdODbk5yXD0ynV8WfeXCxFRcxiMZrz6w0kIAjC1Xyfc38NH7Ej0OywsZJOCOrgiYXztqaHlu87iSkmlyImIyNat2JeF84UV6OjmhL9PjBQ7Dt2EhYVs1uODQqAK6whdTe2pITNPDRFRE10oLMcney8AAJZMikQHNyeRE9HNWFjIZkmlEix/JBquTjKkZpdibSrv6ExEjWc2C1j8w0nLnZgf6hModiRqAAsL2bRgL1csrruj87IdZ5FbWiVyIiKyNV8fuoJjOWVwlzvgn1N6QyLh9PvWiIWFbN6swaEY1MULVQYTFv9wEoLAU0NEdG+ullVj+c7aOzEvHhuOQN6J2WqxsJDNk0olWD4tGs6OUiRnlWD94RyxIxGRDRAEAX/dlIFKgwkDQjtgZizvxGzNWFjILnT2dsOiMbWnhhK3n8XVsmqRExGRtVt76AqSzhfDyUGKZdOieCdmK8fCQnbjqSGdERPaARV6Y91cCjw1REQNyyquwL+2nwEAvDo2At18eSdma9fowrJ//35MmjQJgYGBkEgk2LJlS73lgiBgyZIlCAgIgIuLC+Li4nDhwoV665SWlmLmzJlQKBTw9PTEnDlzUFHBKdapeWR1Vw3JHaT47cI1fHc0V+xIRGSFakxmvLQhHboaM4Z288ZTQzqLHYnuQaMLS2VlJfr06YNPP/20weXLly/HRx99hJUrVyI1NRVubm4YM2YMdDqdZZ2ZM2ciMzMTu3fvxrZt27B//37MnTu36VtBVKerjzsWju4BAPjntjMo0PDUEBHV9+EvF5BxVQOliyPend6Hp4JshERoxnFziUSCzZs3Y8qUKQBqj64EBgZi4cKFeOWVVwAAGo0Gfn5+WLNmDWbMmIEzZ84gMjISR44cwYABAwAAO3fuxPjx45GXl4fAwLtf/67VaqFUKqHRaKBQKJoan+yUySxg2opkpOeW4YFwH3zx1EBepkhEAICjl0vxh89SYBaATx/vjwnRAWJHalea8/ndomNYsrOzoVarERcXZ3lOqVQiNjYWKSkpAICUlBR4enpaygoA
xMXFQSqVIjU1tcH31ev10Gq19R5EtyOTSvDu9Gg4OUix91wxNhzhqSEiAsp1NXj5u3SYBWBq/04sKzamRQuLWq0GAPj5+dV73s/Pz7JMrVbD19e33nIHBwd4eXlZ1rlZYmIilEql5REcHNySsckOdfP1wCt1p4be2JqJi0XlIiciIrG9ufU0ckur0cnTBa8/1EvsONRINnGVUEJCAjQajeWRm8t/MdPd/XFoGIZ194auxowXvkmHrsYkdiQiEsnOUwXYmJYHiQR4/9G+UDg7ih2JGqlFC4u/vz8AoLCwsN7zhYWFlmX+/v4oKiqqt9xoNKK0tNSyzs3kcjkUCkW9B9HdSKUS/L/pfeDl5oQzBVq8XTebJRG1L0VaHRI2ZQAA5t3fFYO6eImciJqiRQtLly5d4O/vjz179lie02q1SE1NhUqlAgCoVCqUlZUhLS3Nss6vv/4Ks9mM2NjYloxDBF+FM96dHg0AWH3wMvaeLbrLK4jIngiCgFe+P4nrVTXoFajAy3E9xI5ETdTowlJRUYH09HSkp6cDqB1om56ejpycHEgkErz00kv45z//iZ9++gkZGRmYPXs2AgMDLVcS9ezZE2PHjsWzzz6Lw4cP4+DBg5g/fz5mzJhxT1cIETXWyAg/yzwLr2w8gSKt7s4vICK78VXKFew/Xwy5gxQfzugLJwebGAlBDWj0njt69Cj69euHfv36AQAWLFiAfv36YcmSJQCAv/zlL3jhhRcwd+5cDBw4EBUVFdi5cyecnZ0t77Fu3TpERERg1KhRGD9+PIYOHYpVq1a10CYR3erVcRHoGaBASaUBCzeegNnMWXCJ7N3FonK8VTeb7V/H9+RstjauWfOwiIXzsFBTXCwqx8SPD0BXY8Zfx0dg7vCuYkciolZiMJrx8L8PIjNfi+E9fPDl05yPyRpYzTwsRNasm68HlkysvZTxnV3nkJGnETkREbWWD345j8x8LTq4OuKdR6JZVuwACwu1K48NCsbYXv6oMQn484bjqNQbxY5ERC3scHYpViRlAQASp0bBT+F8l1eQLWBhoXZFIpFg2bQoBCidkX2tEkt/yhQ7EhG1oHJdDV7+Nh2CAEyPCcLY3pzN1l6wsFC74+nqhA8e7QupBPg+LQ8/ncgXOxIRtQBBELD4h5O4WlaNYC8XLOVstnaFhYXapdiwjpj/QDcAwN82ZSC3tErkRETUXF8cvIztGWo4yiT4aEY/uMsdxI5ELYiFhdqtP4/qjpjQDijXG/HihuMwmsxiRyKiJjp6uRSJdZcwvzYhEv1COoiciFoaCwu1Ww4yKT54tC88nB1wLKcMH+65IHYkImqCaxV6xK8/BqNZwKQ+gZitChU7ErUCFhZq14K9XPHWw1EAgE/2XsShSyUiJyKixjCZBby44TgKtXp083XHsqlRvITZTrGwULs3qU8gpscEQRCAFzccR1E5p+4nshXv7z6PgxdL4Ookw8on+sON41bsFgsLEYDXH+qF7r7uKNTqEb/uGAxGjmchsnZ7zhTik70XAQDLpkVz6n07x8JCBMBN7oDPZsXAQ+6AI5ev458/nxY7EhHdQW5pFV7+Nh0A8KQqFA/14c1z7R0LC1GdMB93fDCjL4DaO7x+dzRX3EBE1CBdjQnPr0uDVmdE32BP/G1CpNiRqA2wsBD9zqiefngprjsA4LUtp3Ait0zcQER0ize2nsapq7X3Cfp0Zn84OfCjrD3gXia6yZ9HdkdcT18YjGbMW5uGaxV6sSMRUZ0f0vLwzeEcSCTAhzP6oZOni9iRqI2wsBDdRCqV4L1H+yLM2w0FGh3i1x1DDSeVIxLdWbUWf9uSAQB4aVQPDO/hI3IiakssLEQNUDg7YtXsGLg5yZCaXYrE7WfFjkTUrml1NXh+7THoasy4v4cPXhjZTexI1MZYWIhuo5uvB/7fH/oCAL44mI3Nx/PEDUTUTgmCgL9sPInsa5Xo5OlSe/NSKSeHa29YWIjuYGxvf8u/5F79IQOnrmpE
TkTU/nx+IBs7M2tvavjpzP7o4OYkdiQSAQsL0V28FNcDI8J9oDea8dzXaSitNIgdiajd2HeuCG/V3dRwycRI9A32FDcQiYaFheguZFIJPny0H0I7uuJqWTVe+OYY7+xM1AbOqrWYv/44zAIwPSYITwzmTQ3bMxYWonugdHXEqlkD4Ookw8GLJXhn1zmxIxHZtSKtDs+sPoIKvRGqsI7418O8qWF7x8JCdI/C/T3wziN9AACf7b+ErSfyRU5EZJ+qDSb88aujyNfoEObjhpVPxHByOGJhIWqMCdEBmHd/VwDAX74/icx8DsIlaklms4CXv03HyTwNOrg6YvVTA6F0dRQ7FlkBFhaiRlo0JhzDunujusaEp1cfQd71KrEjEdmNt3eexc5MNZxkUqyaPQChHd3EjkRWgoWFqJFkUgk+ebw/wv08UFSux1Orj0BTVSN2LCKb983hHHy2/xIAYPkj0RjY2UvkRGRNWFiImkDp4ojVTw+Ev8IZF4sq8OxXR6GrMYkdi8hmHbhwDa9tOQUAeCmuO6b06yRyIrI2LCxETRTo6YI1zwyEh9wBhy+XYuF3J2A2C2LHIrI5FwrL8fy6NJjMAh7u1wkvjuoudiSyQiwsRM0Q4a/AZ7Nj4CiT4OeMAvzz5zNiRyKyKdcq9Hh6zRGU64wY2LkDlk3j5cvUMBYWomYa0tUb706vvdz5i4PZ+O9vl0RORGQbdDUmPPvVUeRdr0ZoR1d8NmsA5A4ysWORlWJhIWoBk/t2QsK4CADAP38+wzlaiO7CbBawcOMJHM8pg9LFEV88NRBevEcQ3QELC1ELmTs8DE8N6QwAWPjdCaRklYgbiMiKvbf7PH4+WQBHmQQrn4hBVx93sSORlWNhIWohEokEf58YibG9/GEwmTH366M4X1gudiwiq7PhcA4+2XsRAPDWw1FQde0ociKyBSwsRC1IJpXggxl9MSC0A8p1Rjz5xWGoNTqxYxFZjR/TryJhcwYAIP6Brpg+IFjkRGQrWFiIWpizowz/fXIAuvq4oUCjw1OrD0Or48RyRDtPqbHguxMQBGBmbAheGR0udiSyISwsRK3A09UJa54eBB8POc6qyzHv6zQYjGaxYxGJZu+5IrzwzTGYzAKm9Q/CPyb35uXL1CgsLEStJNjLFaufGgg3JxmSs0rwysYTMHFiOWqHUrJKMO/rNNSYBEyIDsDb06IglbKsUOOwsBC1ot6dlFjxRAwcpBL8dCIfi75naaH2Je3Kdcz58gj0RjPievrig0f7wkHGjx5qPP7UELWy4T188NFj/SCTSrDp2FWWFmo3Tl3V4KkvDqPKYMKw7t745PH+cGRZoSZq8Z+c119/HRKJpN4jIiLCslyn0yE+Ph4dO3aEu7s7pk2bhsLCwpaOQWRVxkcF4GOWFmpHzqnLMevzVJTrjRjU2QufzYqBsyNnsaWma5Wq26tXLxQUFFgeBw4csCx7+eWXsXXrVmzcuBFJSUnIz8/H1KlTWyMGkVVhaaH24lJxBWb+NxXXq2rQJ9gTnz81AK5ODmLHIhvXKj9BDg4O8Pf3v+V5jUaDzz//HOvXr8fIkSMBAKtXr0bPnj1x6NAhDB48uDXiEFmN8VEBAIAXvjmOTceuAgDeeaQPZByASHYit7QKM/+bimsVevQMUODLpwfCw9lR7FhkB1rlCMuFCxcQGBiIsLAwzJw5Ezk5OQCAtLQ01NTUIC4uzrJuREQEQkJCkJKSctv30+v10Gq19R5EtopHWsheqTU6zPxvKgo0OnT1ccPXcwbB05X3B6KW0eKFJTY2FmvWrMHOnTuxYsUKZGdnY9iwYSgvL4darYaTkxM8PT3rvcbPzw9qtfq275mYmAilUml5BAdzZkSybTeXlr98f5KlhWzatQo9Zv73EHJKqxDa0RXrnx0Mb3e52LHIjrT4KaFx48ZZ/j86OhqxsbEIDQ3Fd999BxcXlya9Z0JCAhYsWGD5WqvVsrSQzRsfFQBBAP684Th+OJYHAFj+
SDRPD5HNKS7XY9bnqcgqrkSg0hnr/hgLP4Wz2LHIzrT69WWenp7o0aMHLl68CH9/fxgMBpSVldVbp7CwsMExLzfI5XIoFIp6DyJ7MCE6AB/NqD3S8sOxPB5pIZuTU1KFR1Ym46y6HD4ecqx7djCCOriKHYvsUKsXloqKCmRlZSEgIAAxMTFwdHTEnj17LMvPnTuHnJwcqFSq1o5CZJVYWshWnSnQYtrKZFwpqUKwlws2PqdCF283sWORnWrxU0KvvPIKJk2ahNDQUOTn52Pp0qWQyWR47LHHoFQqMWfOHCxYsABeXl5QKBR44YUXoFKpeIUQtWsTomuvHuLpIbIVh7NLMefLIyjXGRHh74GvnhkEX54GolbU4oUlLy8Pjz32GEpKSuDj44OhQ4fi0KFD8PHxAQC8//77kEqlmDZtGvR6PcaMGYN///vfLR2DyObcXFp0NSb8vz/04WRbZHV+OV2I+PXHoDeaMaizF/7z5AAoXXjpMrUuiSAINnfsWavVQqlUQqPRcDwL2Z3tGQV4ccNx1JgEDAjtgP/MHoAObrw0lKzDxqO5eHVTBkxmAXE9ffHJ4/1ZqumeNefzmzd1ILIy46MC8OUzg+Dh7ICjV65j6opkXCmpFDsWET5LysKiujFWj8QEYeUTnG6f2g4LC5EVGtLVG5ueH4JOni7IvlaJqf9OxvGc62LHonZKEAQkbj+DxB1nAQBzh4fhnUeieddlalP8aSOyUt39PLA5fgh6d1KgpNKAx/5zCLsybz/BIlFrMJrMWPT9SXy2/xIAIGFcBP46vickEg4Ip7bFwkJkxXw9nPHtXBVGRvhCV2PGvLVp+OJAttixqJ3Q1Zgwb20avk/Lg0wqwfJHovHc/V3FjkXtFAsLkZVzkztg1awYzIwNgSAAb247jTe3nuZcLdSqNNU1mP35YfxypghyBylWPhGDPwzgDOMkHhYWIhvgIJPin1N649VxEQCALw5mI37dMehqTCInI3t0obAcD396EIcvl8LD2QFfPTMID0b6iR2L2jkWFiIbIZFIMO/+rvj4sX5wkkmxM1ONx/5zCCUVerGjkR35+WQBJn96EJeu1d4X6Nu5KsSGdRQ7FhELC5GtmdQnEGv/GAuliyOO55Rh6opkZF/jZc/UPEaTGW9tP4P49cdQZTBhSNeO2PrCUEQGcq4rsg4sLEQ2aFAXL/zw/BAEe7ngSkkVpnx6EL+cLhQ7Ftmokgo9Zn1+GKvqrgR6bngYvnpmEDq6y0VORvR/WFiIbFQ3X3dsev4+9A32hKa6Bn/86ij+ue00DEaz2NHIhpzILcOkjw8g5VIJXJ1k+PTx/kgY35NzrJDV4U8kkQ3z8ZDj2+cG45n7ugAA/nsgG9M/S0FuaZXIycgWbDicg+krU5Cv0SHM2w0/xt9nuacVkbVhYSGycXIHGZZMisSqWTFQODvgRG4ZJnz0G3ae4iRz1DC90YSETSfx6qYMGExmPBjphy3z70N3Pw+xoxHdFgsLkZ0Y3csf218chn4hntDqjJi3Ng2v/5QJvZGXPtP/yS+rxh9WpuCbw7mQSIBFY8Lx2RMxUDjzbstk3VhYiOxIUAdXfPecCs8NDwMArEm+jEdWpPDmiQQASM66hkkfH8CJPA08XR2x5ulBiH+gG6RSTrNP1o+FhcjOOMqkSBjfE188NQAdXB2RcVWDCR8dwLaT+WJHI5Hoakx4a/sZPPHfVJRUGhAZoMDW+UNxfw8fsaMR3TMWFiI7NTLCD9tfHIaBnTugQm/E/PXH8bfNGZwdt505erkU4z/8Dav2X4JZAB6JCcKmPw1BsJer2NGIGkUiCILN3ZBEq9VCqVRCo9FAoeCkRkR3YjSZ8d7u8/j3viwAQIS/Bz5+rB8HWNq5KoMR7+w6hzXJlyEIgJ9Cjn9NiUIcp9gnETXn85uFhaidSDpfjAXfpqOk0gBHmQRzh4dh/gPd4eIkEzsatbCUrBIs/uEkcuoub58eE4TXJkZC6cKBtSQuFhYi
uieFWh3+uikDe84WAQCCvVzwj8m9MSLcV+Rk1BIq9UYs23EWXx+6AgAIUDojcWoU9y9ZDRYWIrpngiBgV2Yh3tiaiQKNDgAwISoAf58YCX+ls8jpqKkOXLiGxT+cxNWyagDAY4NC8NfxEfDg5cpkRVhYiKjRKvRGfLD7PFYnX4bJLMBd7oCFo3tgtqozZLzM1WZodTVI3H4G3xzOBQB08nTB29OiMbS7t8jJiG7FwkJETZaZr8HfNp9Cem4ZAKB3JwXeejgK0UGeouaiOxMEAf87XYjXf/q/I2WzVaH4y9gIuMsdRE5H1DAWFiJqFrNZwDdHcvD2jrPQ6oyQSIDZg0OxcEw4Z0C1QqmXSvD2zrM4llMGAAjt6Iq3p0VjcFhHcYMR3QULCxG1iOJyPd7afgabj18FUHtzxdcm9MSk6EDOhmoFTudrsXzXWew7VwwAcHaU4pn7umD+yG5wdeJRFbJ+LCxE1KKSL17Da1tO4dK12in9w/08MH9kN4yPCuD4FhFcKanEe7vP48f02tmKHaQSzBgUjD+P7A5fBQdKk+1gYSGiFqc3mrAq6RJW7b+Ecr0RANDVxw3zR3bDpOhAOMg4UXZrKyrX4eM9F/HN4RwYzbV/VU/qE4iFD/ZAZ283kdMRNR4LCxG1Gk11DdYcvIwvDmZDU10DAOjc0RV/eqAbHu7XCY4sLi1Oq6vBqqRL+PxANqrrbqVwfw8fLBoTjt6dlCKnI2o6FhYianXluhp8lXIF//3tEq5X1RaXoA4u+NOIbpgW0wlyB86Y21xaXQ02HM7Bv/dloazuz7hvsCcWj42AqisH1JLtY2EhojZTqTdiXeoVrNqfjWsVegC1M6rOu78rHh0YDGdHFpfGOplXhnWHcvDTiXzLEZVuvu5YNCYcoyP9IJFw3BDZBxYWImpzuhoTvjmcg5VJWSjU1hYXHw85Zg0OxcP9OvFuwHdRqTfix/R8rD98Baeuai3Pd/d1x7PDwzCtfxAHOJPdYWEhItHoakzYmJaHlfuyLNPCA8DAzh3wcL8gTIgKgNKVc7nccDpfi3WpV/Bjej4q6gYzOzlIMb63P2YODsWA0A48okJ2i4WFiERnMJqx7WQ+fjiWh+SsEtz4m8VJJsXICF883L8TRoT7tMuxLtUGE7adzMe61BzLjMIA0MXbDY8PCsG0mCB4uTmJF5CojbCwEJFVKdBU46f0fGw+fhVn1eWW55UujpgYHYCp/Tuhf4h9H0nQVNXgYNY17D9fjO0ZBdDqao+mOEglGNPbHzMHhUDVtaNd/xkQ3YyFhYis1ul8LbakX8WW41dRVK63PB/i5YqH+gRiSLeO6BvsafMztZrMAk7klWH/+WLsP1+M9NwymH/3t2tQBxc8HhuC6THB8PGQixeUSEQsLERk9UxmASlZJdh0PA87T6lRZTBZljlIJejVSYmBoR0woLMXBnTuAG936/9Qzy+rxm8XirH//DUcuHjNMk/NDd183TGsuzdGRfhhSNeOvL0BtXssLERkU6oMRvwvsxC/nCnE0cvXodbqblknzNsNAzrXFpiBnb3QuaOraKdPBEHAtQoDsq9VIvtaBc4UlOPAxWu4WFRRbz2FswOGdvfG8O4+GNbDB508XUTJS2StWFiIyGYJgoCrZdU4evk6jlwuxdHL13GusPyW9bzdndDDzwP+Smf4K5zhr3SGn6L2/wOUzujoLm/2ZcAVeiMuX6vEpWuVuFRcUVdQKpFdXGm5PcHvSSW1E7sN6+6D4T180CdIyVsWEN1Bcz6/bfukMRHZPIlEgqAOrgjq4Iop/ToBqB2weizn/wpMel4ZrlUYcK2i5LbvI5NK4Osht5QYbw8nCAJQYzLDaBJgqPuv0WyGwSTAaDKjxmRGjUlAjcmM4nJ9vTE2t+asHYfSxdsdYd5uGNTFC/d19eYl20RtRNQjLJ9++ineeecdqNVq9OnTBx9//DEGDRp019fxCAtR+6I3mnDqqhY5pZVQa/Qo1OpQoKmGWqtH
oUaHonJdvQGuzeHt7oQu3m51D3eE+bghzNsNwV6unMWXqJls8gjLt99+iwULFmDlypWIjY3FBx98gDFjxuDcuXPw9fUVKxYRWSG5gwwxoR0QE9qhweVGkxnXKgxQa3VQa3Qo1OpQUqGHVCqBo0wKR5kEDlIpHB2kcKx7zkF2Y1nt/3dwrS0qShceMSGyRqIdYYmNjcXAgQPxySefAADMZjOCg4Pxwgsv4NVXX73ja3mEhYiIyPY05/NblNFhBoMBaWlpiIuL+78gUini4uKQkpJyy/p6vR5arbbeg4iIiNoPUQrLtWvXYDKZ4OfnV+95Pz8/qNXqW9ZPTEyEUqm0PIKDg9sqKhEREVkBm7j+LiEhARqNxvLIzc0VOxIRERG1IVEG3Xp7e0Mmk6GwsLDe84WFhfD3979lfblcDrnc+me9JCIiotYhyhEWJycnxMTEYM+ePZbnzGYz9uzZA5VKJUYkIiIismKiXda8YMECPPnkkxgwYAAGDRqEDz74AJWVlXj66afFikRERERWSrTC8uijj6K4uBhLliyBWq1G3759sXPnzlsG4hIRERHxXkJERETUJmxuHhYiIiKixmBhISIiIqvHwkJERERWj4WFiIiIrB4LCxEREVk90S5rbo4bFzbxJohERES248bndlMuULbJwlJeXg4AvAkiERGRDSovL4dSqWzUa2xyHhaz2Yz8/Hx4eHhAIpG06HtrtVoEBwcjNzfXbud4aQ/bCHA77Q230360h20EuJ0NEQQB5eXlCAwMhFTauFEpNnmERSqVIigoqFW/h0KhsOsfMKB9bCPA7bQ33E770R62EeB23qyxR1Zu4KBbIiIisnosLERERGT1WFhuIpfLsXTpUsjlcrGjtJr2sI0At9PecDvtR3vYRoDb2dJsctAtERERtS88wkJERERWj4WFiIiIrB4LCxEREVk9FhYiIiKyeu2usPzrX//CkCFD4OrqCk9PzwbXycnJwYQJE+Dq6gpfX18sWrQIRqPxju9bWlqKmTNnQqFQwNPTE3PmzEFFRUUrbEHj7du3DxKJpMHHkSNHbvu6ESNG3LL+vHnz2jB543Xu3PmWzMuWLbvja3Q6HeLj49GxY0e4u7tj2rRpKCwsbKPEjXf58mXMmTMHXbp0gYuLC7p27YqlS5fCYDDc8XW2sD8//fRTdO7cGc7OzoiNjcXhw4fvuP7GjRsREREBZ2dnREVFYfv27W2UtGkSExMxcOBAeHh4wNfXF1OmTMG5c+fu+Jo1a9bcst+cnZ3bKHHTvP7667dkjoiIuONrbG1fNvR3jUQiQXx8fIPr28p+3L9/PyZNmoTAwEBIJBJs2bKl3nJBELBkyRIEBATAxcUFcXFxuHDhwl3ft7G/2w1pd4XFYDBg+vTpeP755xtcbjKZMGHCBBgMBiQnJ+PLL7/EmjVrsGTJkju+78yZM5GZmYndu3dj27Zt2L9/P+bOndsam9BoQ4YMQUFBQb3HH//4R3Tp0gUDBgy442ufffbZeq9bvnx5G6VuujfffLNe5hdeeOGO67/88svYunUrNm7ciKSkJOTn52Pq1KltlLbxzp49C7PZjM8++wyZmZl4//33sXLlSvz1r3+962uteX9+++23WLBgAZYuXYpjx46hT58+GDNmDIqKihpcPzk5GY899hjmzJmD48ePY8qUKZgyZQpOnTrVxsnvXVJSEuLj43Ho0CHs3r0bNTU1GD16NCorK+/4OoVCUW+/XblypY0SN12vXr3qZT5w4MBt17XFfXnkyJF627d7924AwPTp02/7GlvYj5WVlejTpw8+/fTTBpcvX74cH330EVauXInU1FS4ublhzJgx0Ol0t33Pxv5u35bQTq1evVpQKpW3PL99+3ZBKpUKarXa8tyKFSsEhUIh6PX6Bt/r9OnTAgDhyJEjlud27NghSCQS4erVqy2evbkMBoPg4+MjvPnmm3dc7/777xdefPHFtgnVQkJDQ4X333//ntcvKysTHB0dhY0b
N1qeO3PmjABASElJaYWErWP58uVCly5d7riOte/PQYMGCfHx8ZavTSaTEBgYKCQmJja4/h/+8AdhwoQJ9Z6LjY0VnnvuuVbN2ZKKiooEAEJSUtJt17nd31XWbOnSpUKfPn3ueX172Jcvvvii0LVrV8FsNje43Bb3IwBh8+bNlq/NZrPg7+8vvPPOO5bnysrKBLlcLnzzzTe3fZ/G/m7fTrs7wnI3KSkpiIqKgp+fn+W5MWPGQKvVIjMz87av8fT0rHe0Ii4uDlKpFKmpqa2eubF++uknlJSU4Omnn77ruuvWrYO3tzd69+6NhIQEVFVVtUHC5lm2bBk6duyIfv364Z133rnj6by0tDTU1NQgLi7O8lxERARCQkKQkpLSFnFbhEajgZeX113Xs9b9aTAYkJaWVm8/SKVSxMXF3XY/pKSk1FsfqP1dtbX9BuCu+66iogKhoaEIDg7G5MmTb/t3kTW5cOECAgMDERYWhpkzZyInJ+e269r6vjQYDFi7di2eeeaZO96Q1xb34+9lZ2dDrVbX21dKpRKxsbG33VdN+d2+HZu8+WFrUqvV9coKAMvXarX6tq/x9fWt95yDgwO8vLxu+xoxff755xgzZsxdbyD5+OOPIzQ0FIGBgTh58iQWL16Mc+fOYdOmTW2UtPH+/Oc/o3///vDy8kJycjISEhJQUFCA9957r8H11Wo1nJycbhnP5OfnZ5X7riEXL17Exx9/jHffffeO61nz/rx27RpMJlODv3tnz55t8DW3+121lf1mNpvx0ksv4b777kPv3r1vu154eDi++OILREdHQ6PR4N1338WQIUOQmZnZ6jeBbarY2FisWbMG4eHhKCgowBtvvIFhw4bh1KlT8PDwuGV9W9+XW7ZsQVlZGZ566qnbrmOL+/FmN/ZHY/ZVU363b8cuCsurr76Kt99++47rnDlz5q6DvmxNU7Y7Ly8Pu3btwnfffXfX9//9GJyoqCgEBARg1KhRyMrKQteuXZsevJEas50LFiywPBcdHQ0nJyc899xzSExMtPrpsZuyP69evYqxY8di+vTpePbZZ+/4WmvZn1QrPj4ep06duuPYDgBQqVRQqVSWr4cMGYKePXvis88+wz/+8Y/Wjtkk48aNs/x/dHQ0YmNjERoaiu+++w5z5swRMVnr+PzzzzFu3DgEBgbedh1b3I/Wxi4Ky8KFC+/YbAEgLCzsnt7L39//ltHLN64Y8ff3v+1rbh48ZDQaUVpaetvXtISmbPfq1avRsWNHPPTQQ43+frGxsQBq/0Xflh9wzdm/sbGxMBqNuHz5MsLDw29Z7u/vD4PBgLKysnpHWQoLC1t13zWksduZn5+PBx54AEOGDMGqVasa/f3E2p8N8fb2hkwmu+XqrDvtB39//0atb03mz59vGZzf2H9dOzo6ol+/frh48WIrpWt5np6e6NGjx20z2/K+vHLlCn755ZdGH6m0xf14Y38UFhYiICDA8nxhYSH69u3b4Gua8rt9W40a8WJH7jbotrCw0PLcZ599JigUCkGn0zX4XjcG3R49etTy3K5du6xu0K3ZbBa6dOkiLFy4sEmvP3DggABAOHHiRAsnaz1r164VpFKpUFpa2uDyG4Nuv//+e8tzZ8+etfpBt3l5eUL37t2FGTNmCEajsUnvYW37c9CgQcL8+fMtX5tMJqFTp053HHQ7ceLEes+pVCqrHqhpNpuF+Ph4ITAwUDh//nyT3sNoNArh4eHCyy+/3MLpWk95ebnQoUMH4cMPP2xwuS3uyxuWLl0q+Pv7CzU1NY16nS3sR9xm0O27775reU6j0dzToNvG/G7fNk+j1rYDV65cEY4fPy688cYbgru7u3D8+HHh+PHjQnl5uSAItT9EvXv3FkaPHi2kp6cLO3fuFHx8fISEhATLe6Smpgrh4eFCXl6e5bmxY8cK/fr1E1JTU4UDBw4I3bt3Fx577LE23747+eWXXwQAwpkzZ25ZlpeXJ4SHhwupqamCIAjCxYsXhTff
fFM4evSokJ2dLfz4449CWFiYMHz48LaOfc+Sk5OF999/X0hPTxeysrKEtWvXCj4+PsLs2bMt69y8nYIgCPPmzRNCQkKEX3/9VTh69KigUqkElUolxibck7y8PKFbt27CqFGjhLy8PKGgoMDy+P06trY/N2zYIMjlcmHNmjXC6dOnhblz5wqenp6WK/ZmzZolvPrqq5b1Dx48KDg4OAjvvvuucObMGWHp0qWCo6OjkJGRIdYm3NXzzz8vKJVKYd++ffX2W1VVlWWdm7fzjTfeEHbt2iVkZWUJaWlpwowZMwRnZ2chMzNTjE24JwsXLhT27dsnZGdnCwcPHhTi4uIEb29voaioSBAE+9iXglD7wRsSEiIsXrz4lmW2uh/Ly8stn4sAhPfee084fvy4cOXKFUEQBGHZsmWCp6en8OOPPwonT54UJk+eLHTp0kWorq62vMfIkSOFjz/+2PL13X6371W7KyxPPvmkAOCWx969ey3rXL58WRg3bpzg4uIieHt7CwsXLqzXnvfu3SsAELKzsy3PlZSUCI899pjg7u4uKBQK4emnn7aUIGvx2GOPCUOGDGlwWXZ2dr0/h5ycHGH48OGCl5eXIJfLhW7dugmLFi0SNBpNGyZunLS0NCE2NlZQKpWCs7Oz0LNnT+Gtt96qd2Ts5u0UBEGorq4W/vSnPwkdOnQQXF1dhYcffrjeh7+1Wb16dYM/w78/YGqr+/Pjjz8WQkJCBCcnJ2HQoEHCoUOHLMvuv/9+4cknn6y3/nfffSf06NFDcHJyEnr16iX8/PPPbZy4cW6331avXm1Z5+btfOmllyx/Jn5+fsL48eOFY8eOtX34Rnj00UeFgIAAwcnJSejUqZPw6KOPChcvXrQst4d9KQi1R9IBCOfOnbtlma3uxxufbzc/bmyL2WwW/v73vwt+fn6CXC4XRo0adcv2h4aGCkuXLq333J1+t++VRBAEoXEnkYiIiIjaFudhISIiIqvHwkJERERWj4WFiIiIrB4LCxEREVk9FhYiIiKyeiwsREREZPVYWIiIiMjqsbAQERGR1WNhISIiIqvHwkJERERWj4WFiIiIrB4LCxEREVm9/w/F/0AA4enLqAAAAABJRU5ErkJggg==",
72
+ "text/plain": [
73
+ "<Figure size 640x480 with 1 Axes>"
74
+ ]
75
+ },
76
+ "metadata": {},
77
+ "output_type": "display_data"
78
+ }
79
+ ],
80
+ "source": [
81
+ "xs = np.arange(-10, 10, 0.5)\n",
82
+ "ys = f(xs)\n",
83
+ "plt.plot(xs, ys)"
84
+ ]
85
+ },
86
+ {
87
+ "attachments": {},
88
+ "cell_type": "markdown",
89
+ "metadata": {},
90
+ "source": [
91
+ "Now let's implement a derivative in code, since the actual expression is too difficult:"
92
+ ]
93
+ },
94
+ {
95
+ "cell_type": "code",
96
+ "execution_count": 10,
97
+ "metadata": {},
98
+ "outputs": [
99
+ {
100
+ "data": {
101
+ "text/plain": [
102
+ "0.0029999999995311555"
103
+ ]
104
+ },
105
+ "execution_count": 10,
106
+ "metadata": {},
107
+ "output_type": "execute_result"
108
+ }
109
+ ],
110
+ "source": [
111
+ "h = 10 ** -3\n",
112
+ "x = 2/3\n",
113
+ "(f(x + h) - f(x)) / h"
114
+ ]
115
+ },
116
+ {
117
+ "cell_type": "code",
118
+ "execution_count": 11,
119
+ "metadata": {},
120
+ "outputs": [
121
+ {
122
+ "name": "stdout",
123
+ "output_type": "stream",
124
+ "text": [
125
+ "4.0\n"
126
+ ]
127
+ }
128
+ ],
129
+ "source": [
130
+ "a = 2.0\n",
131
+ "b = -3.0\n",
132
+ "c = 10.0\n",
133
+ "d = a*b + c\n",
134
+ "print(d)"
135
+ ]
136
+ },
137
+ {
138
+ "cell_type": "code",
139
+ "execution_count": 13,
140
+ "metadata": {},
141
+ "outputs": [
142
+ {
143
+ "name": "stdout",
144
+ "output_type": "stream",
145
+ "text": [
146
+ "d1 4.0\n",
147
+ "d2 4.0002\n",
148
+ "slope 2.0000000000042206\n"
149
+ ]
150
+ }
151
+ ],
152
+ "source": [
153
+ "h = 0.0001\n",
154
+ "\n",
155
+ "# Arbitrary example inputs\n",
156
+ "a = 2.0\n",
157
+ "b = -3.0\n",
158
+ "c = 10.0\n",
159
+ "\n",
160
+ "d1 = a*b + c\n",
161
+ "b += h\n",
162
+ "d2 = a*b + c\n",
163
+ "\n",
164
+ "print('d1', d1)\n",
165
+ "print('d2', d2)\n",
166
+ "print('slope', (d2 - d1) /h)"
167
+ ]
168
+ },
169
+ {
170
+ "attachments": {},
171
+ "cell_type": "markdown",
172
+ "metadata": {},
173
+ "source": [
174
+ "## Value object"
175
+ ]
176
+ },
177
+ {
178
+ "cell_type": "code",
179
+ "execution_count": 24,
180
+ "metadata": {},
181
+ "outputs": [],
182
+ "source": [
183
+ "class Value:\n",
184
+ " def __init__(self, data, _children=(), _op=''):\n",
185
+ " self.data = data\n",
186
+ " # Changing this variable doesn't change the loss\n",
187
+ " self.grad = 0.0\n",
188
+ " self._prev = set(_children)\n",
189
+ " self._op = _op\n",
190
+ " self._backward = lambda: None\n",
191
+ "\n",
192
+ " def __repr__(self):\n",
193
+ " return f\"Value(data={self.data})\"\n",
194
+ "\n",
195
+ " def __add__(self, other):\n",
196
+ " out = Value(self.data + other.data, (self, other), '+')\n",
197
+ " def _backward():\n",
198
+ " self.grad = 1.0 * out.grad\n",
199
+ " other.grad = 1.0 * out.grad\n",
200
+ " out._backward = _backward\n",
201
+ " return out\n",
202
+ "\n",
203
+ " def __mul__(self, other):\n",
204
+ " out = Value(self.data * other.data, (self, other), '*')\n",
205
+ "\n",
206
+ " def _backward(): \n",
207
+ " self.grad = other.data * out.grad\n",
208
+ " other.grad = self.data * out.grad\n",
209
+ " out._backward = _backward\n",
210
+ " return out\n",
211
+ "\n",
212
+ " def tanh(self):\n",
213
+ " n = self.data\n",
214
+ " t = (math.exp(2*n) - 1) / (math.exp(2*n) + 1)\n",
215
+ " out = Value(t, (self, ), 'tanh')\n",
216
+ " def _backward():\n",
217
+ " # Local derivative \n",
218
+ " self.grad = (1 - t**2) * out.grad\n",
219
+ " \n",
220
+ " out._backward = _backward\n",
221
+ " return out\n"
222
+ ]
223
+ },
224
+ {
225
+ "attachments": {},
226
+ "cell_type": "markdown",
227
+ "metadata": {},
228
+ "source": [
229
+ "This is the alternative, and I do think it feels a bit sharper."
230
+ ]
231
+ },
232
+ {
233
+ "cell_type": "code",
234
+ "execution_count": 26,
235
+ "metadata": {},
236
+ "outputs": [
237
+ {
238
+ "data": {
239
+ "text/plain": [
240
+ "'+'"
241
+ ]
242
+ },
243
+ "execution_count": 26,
244
+ "metadata": {},
245
+ "output_type": "execute_result"
246
+ }
247
+ ],
248
+ "source": [
249
+ "a = Value(2.0)\n",
250
+ "b = Value(-3.0)\n",
251
+ "c = Value(10.0)\n",
252
+ "d = a*b + c\n",
253
+ "d._op"
254
+ ]
255
+ }
256
+ ],
257
+ "metadata": {
258
+ "kernelspec": {
259
+ "display_name": "venv",
260
+ "language": "python",
261
+ "name": "python3"
262
+ },
263
+ "language_info": {
264
+ "codemirror_mode": {
265
+ "name": "ipython",
266
+ "version": 3
267
+ },
268
+ "file_extension": ".py",
269
+ "mimetype": "text/x-python",
270
+ "name": "python",
271
+ "nbconvert_exporter": "python",
272
+ "pygments_lexer": "ipython3",
273
+ "version": "3.10.9 (main, Dec 19 2022, 17:35:49) [GCC 12.2.0]"
274
+ },
275
+ "orig_nbformat": 4,
276
+ "vscode": {
277
+ "interpreter": {
278
+ "hash": "632cf67f3dc7ae5e2d87dcad018c0f1b1cd2d0aa8b91fc5df1dd41dbdb2fa25c"
279
+ }
280
+ }
281
+ },
282
+ "nbformat": 4,
283
+ "nbformat_minor": 2
284
+ }
readme.md ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ # Neural Networks: Zero to Hero
2
+
3
+ This repository contains my personal implementation of neural network models, based on Andrej Karpathy's [Neural Networks: Zero to Hero](https://www.youtube.com/playlist?list=PLAqhIrjkxbuWI23v9cThsA9GvCAUhRvKZ) tutorials on the basics of neural nets and transformers. The code includes models for bigram language modeling, CNN-based natural language processing, and transformer models. These models are designed to help me understand the basic concepts of neural networks and their applications in NLP.
4
+
5
+ This code is heavily inspired by Karpathy's own [nn-zero-to-hero code](https://github.com/karpathy/nn-zero-to-hero), but is not a direct fork. The code is written in Jupyter notebooks and uses the PyTorch library for building and training the models. Each model is implemented as a standalone notebook and can be run independently.
6
+
7
+ Please note that this code is not intended for production use (obviously) and should be used for educational purposes only. If you find any bugs or have suggestions for improvement, please feel free to contribute!