diff --git "a/training.ipynb" "b/training.ipynb"
--- "a/training.ipynb"
+++ "b/training.ipynb"
@@ -849,12 +849,528 @@
     "print(output)"
    ]
   },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# 3. Fine-tuning a pretrained model"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 38,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Some weights of the model checkpoint at bert-base-uncased were not used when initializing BertForSequenceClassification: ['cls.seq_relationship.weight', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.transform.dense.bias', 'cls.predictions.transform.dense.weight', 'cls.seq_relationship.bias', 'cls.predictions.bias']\n",
+      "- This IS expected if you are initializing BertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
+      "- This IS NOT expected if you are initializing BertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n",
+      "Some weights of BertForSequenceClassification were not initialized from the model checkpoint at bert-base-uncased and are newly initialized: ['classifier.bias', 'classifier.weight']\n",
+      "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n",
+      "/Users/florentiana.yuwono/anaconda3/lib/python3.10/site-packages/transformers/optimization.py:411: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n",
+      "  warnings.warn(\n"
+     ]
+    }
+   ],
+   "source": [
+    "import torch\n",
+    "from transformers import AdamW, AutoTokenizer, AutoModelForSequenceClassification\n",
+    "\n",
+    "checkpoint = \"bert-base-uncased\"\n",
+    "tokenizer = AutoTokenizer.from_pretrained(checkpoint)\n",
+    "model = AutoModelForSequenceClassification.from_pretrained(checkpoint)\n",
+    "sequences = [\n",
+    "    \"I've been waiting for a course for my whole life.\",\n",
+    "    \"This course is amazing!\"\n",
+    "]\n",
+    "batch = tokenizer(sequences, padding=True, truncation=True, return_tensors=\"pt\")\n",
+    "\n",
+    "batch[\"labels\"] = torch.tensor([1, 1])\n",
+    "optimizer = AdamW(model.parameters())\n",
+    "loss = model(**batch).loss\n",
+    "loss.backward()\n",
+    "optimizer.step()"
+   ]
+  },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 39,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "application/vnd.jupyter.widget-view+json": {
+       "model_id": "c7129bfda8404178bba7d5c23ac3c67b",
+       "version_major": 2,
+       "version_minor": 0
+      },
+      "text/plain": [
+       "Downloading builder script: 0%|          | 0.00/28.8k [00:00<?, ?B/s]"
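
The stderr output in the first added cell flags that `transformers.AdamW` is deprecated and points to `torch.optim.AdamW` as the replacement. Below is a minimal sketch of the same single training step using the PyTorch optimizer instead; the explicit `num_labels=2` and `lr=5e-5` are illustrative assumptions (the original cell relies on library defaults):

```python
import torch
from torch.optim import AdamW
from transformers import AutoTokenizer, AutoModelForSequenceClassification

checkpoint = "bert-base-uncased"
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
# The classification head is freshly initialized, which is why the checkpoint
# warning says the model should be trained on a downstream task before use.
model = AutoModelForSequenceClassification.from_pretrained(checkpoint, num_labels=2)

sequences = [
    "I've been waiting for a course for my whole life.",
    "This course is amazing!",
]
batch = tokenizer(sequences, padding=True, truncation=True, return_tensors="pt")
batch["labels"] = torch.tensor([1, 1])  # one label per sequence

# torch.optim.AdamW, not the deprecated transformers.AdamW;
# lr=5e-5 is a common starting point for fine-tuning BERT-sized models (assumption).
optimizer = AdamW(model.parameters(), lr=5e-5)

optimizer.zero_grad()        # clear any stale gradients
loss = model(**batch).loss   # passing labels makes the model return a loss
loss.backward()              # backpropagate through the whole network
optimizer.step()             # apply one AdamW update to all parameters
```

A single step over two hand-written examples only demonstrates the mechanics; it says nothing about downstream quality. Alternatively, passing `no_deprecation_warning=True` to `transformers.AdamW` silences the warning without changing optimizers, as the warning text itself notes.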
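The hunk is truncated inside the next cell's output, so that cell's source is not visible. The "Downloading builder script" progress bar is the kind of message the `datasets` library prints when it fetches a dataset definition for the first time; the following is only a hedged guess at the sort of call that produces it (the GLUE/MRPC names are assumptions, not taken from the truncated cell):

```python
from datasets import load_dataset

# Hypothetical: the first call downloads and caches the builder script
# (~28.8 kB, matching the progress bar above); later calls hit the cache.
raw_datasets = load_dataset("glue", "mrpc")
print(raw_datasets)
```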