{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "-e84g1YoseoE"
   },
   "source": [
    "# TextAttack End-to-End\n",
    "\n",
    "This tutorial provides a broad end-to-end overview of training, evaluating, and attacking a model using TextAttack."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "pGv59SZzseoG"
   },
   "source": [
    "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/QData/TextAttack/blob/master/docs/2notebook/0_End_to_End.ipynb)\n",
    "\n",
    "[![View Source on GitHub](https://img.shields.io/badge/github-view%20source-black.svg)](https://github.com/QData/TextAttack/blob/master/docs/2notebook/0_End_to_End.ipynb)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "AQTkpf9RslEA",
    "outputId": "20172655-e6ce-46c0-ba57-cb3a7f955ee3"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Collecting textattack[tensorflow]\n",
      "  Downloading textattack-0.3.3-py3-none-any.whl (361 kB)\n",
      "\u001b[K     |████████████████████████████████| 361 kB 4.1 MB/s \n",
      "\u001b[?25hCollecting language-tool-python\n",
      "  Downloading language_tool_python-2.6.1-py3-none-any.whl (30 kB)\n",
      "Collecting lemminflect\n",
      "  Downloading lemminflect-0.2.2-py3-none-any.whl (769 kB)\n",
      "\u001b[K     |████████████████████████████████| 769 kB 36.0 MB/s \n",
      "\u001b[?25hCollecting word2number\n",
      "  Downloading word2number-1.1.zip (9.7 kB)\n",
      "Collecting transformers>=3.3.0\n",
      "  Downloading transformers-4.11.3-py3-none-any.whl (2.9 MB)\n",
      "\u001b[K     |████████████████████████████████| 2.9 MB 37.9 MB/s \n",
      "\u001b[?25hCollecting flair\n",
      "  Downloading flair-0.9-py3-none-any.whl (319 kB)\n",
      "\u001b[K     |████████████████████████████████| 319 kB 50.4 MB/s \n",
      "\u001b[?25hRequirement already satisfied: scipy>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (1.4.1)\n",
      "Collecting terminaltables\n",
      "  Downloading terminaltables-3.1.0.tar.gz (12 kB)\n",
      "Requirement already satisfied: more-itertools in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (8.10.0)\n",
      "Requirement already satisfied: editdistance in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (0.5.3)\n",
      "Collecting bert-score>=0.3.5\n",
      "  Downloading bert_score-0.3.10-py3-none-any.whl (59 kB)\n",
      "\u001b[K     |████████████████████████████████| 59 kB 7.1 MB/s \n",
      "\u001b[?25hRequirement already satisfied: torch!=1.8,>=1.7.0 in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (1.9.0+cu111)\n",
      "Requirement already satisfied: nltk in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (3.2.5)\n",
      "Collecting num2words\n",
      "  Downloading num2words-0.5.10-py3-none-any.whl (101 kB)\n",
      "\u001b[K     |████████████████████████████████| 101 kB 11.5 MB/s \n",
      "\u001b[?25hCollecting datasets\n",
      "  Downloading datasets-1.13.0-py3-none-any.whl (285 kB)\n",
      "\u001b[K     |████████████████████████████████| 285 kB 53.3 MB/s \n",
      "\u001b[?25hCollecting lru-dict\n",
      "  Downloading lru-dict-1.1.7.tar.gz (10 kB)\n",
      "Requirement already satisfied: pandas>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (1.1.5)\n",
      "Collecting tqdm<4.50.0,>=4.27\n",
      "  Downloading tqdm-4.49.0-py2.py3-none-any.whl (69 kB)\n",
      "\u001b[K     |████████████████████████████████| 69 kB 7.5 MB/s \n",
      "\u001b[?25hRequirement already satisfied: PySocks!=1.5.7,>=1.5.6 in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (1.7.1)\n",
      "Requirement already satisfied: filelock in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (3.3.0)\n",
      "Requirement already satisfied: numpy>=1.19.2 in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (1.19.5)\n",
      "Requirement already satisfied: tensorflow>=2 in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (2.6.0)\n",
      "Requirement already satisfied: tensorflow-hub in /usr/local/lib/python3.7/dist-packages (from textattack[tensorflow]) (0.12.0)\n",
      "Collecting tensorboardX\n",
      "  Downloading tensorboardX-2.4-py2.py3-none-any.whl (124 kB)\n",
      "\u001b[K     |████████████████████████████████| 124 kB 45.0 MB/s \n",
      "\u001b[?25hCollecting tensorflow-text>=2\n",
      "  Downloading tensorflow_text-2.6.0-cp37-cp37m-manylinux1_x86_64.whl (4.4 MB)\n",
      "\u001b[K     |████████████████████████████████| 4.4 MB 38.7 MB/s \n",
      "\u001b[?25hRequirement already satisfied: matplotlib in /usr/local/lib/python3.7/dist-packages (from bert-score>=0.3.5->textattack[tensorflow]) (3.2.2)\n",
      "Requirement already satisfied: packaging>=20.9 in /usr/local/lib/python3.7/dist-packages (from bert-score>=0.3.5->textattack[tensorflow]) (21.0)\n",
      "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from bert-score>=0.3.5->textattack[tensorflow]) (2.23.0)\n",
      "Requirement already satisfied: pyparsing>=2.0.2 in /usr/local/lib/python3.7/dist-packages (from packaging>=20.9->bert-score>=0.3.5->textattack[tensorflow]) (2.4.7)\n",
      "Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.7/dist-packages (from pandas>=1.0.1->textattack[tensorflow]) (2.8.2)\n",
      "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas>=1.0.1->textattack[tensorflow]) (2018.9)\n",
      "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.7/dist-packages (from python-dateutil>=2.7.3->pandas>=1.0.1->textattack[tensorflow]) (1.15.0)\n",
      "Requirement already satisfied: flatbuffers~=1.12.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (1.12)\n",
      "Requirement already satisfied: absl-py~=0.10 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (0.12.0)\n",
      "Requirement already satisfied: google-pasta~=0.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (0.2.0)\n",
      "Requirement already satisfied: h5py~=3.1.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (3.1.0)\n",
      "Requirement already satisfied: typing-extensions~=3.7.4 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (3.7.4.3)\n",
      "Requirement already satisfied: tensorflow-estimator~=2.6 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (2.6.0)\n",
      "Requirement already satisfied: termcolor~=1.1.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (1.1.0)\n",
      "Requirement already satisfied: wrapt~=1.12.1 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (1.12.1)\n",
      "Requirement already satisfied: keras-preprocessing~=1.1.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (1.1.2)\n",
      "Requirement already satisfied: astunparse~=1.6.3 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (1.6.3)\n",
      "Requirement already satisfied: gast==0.4.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (0.4.0)\n",
      "Requirement already satisfied: tensorboard~=2.6 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (2.6.0)\n",
      "Requirement already satisfied: protobuf>=3.9.2 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (3.17.3)\n",
      "Requirement already satisfied: clang~=5.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (5.0)\n",
      "Requirement already satisfied: keras~=2.6 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (2.6.0)\n",
      "Requirement already satisfied: grpcio<2.0,>=1.37.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (1.41.0)\n",
      "Requirement already satisfied: wheel~=0.35 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (0.37.0)\n",
      "Requirement already satisfied: opt-einsum~=3.3.0 in /usr/local/lib/python3.7/dist-packages (from tensorflow>=2->textattack[tensorflow]) (3.3.0)\n",
      "Requirement already satisfied: cached-property in /usr/local/lib/python3.7/dist-packages (from h5py~=3.1.0->tensorflow>=2->textattack[tensorflow]) (1.5.2)\n",
      "Requirement already satisfied: google-auth<2,>=1.6.3 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (1.35.0)\n",
      "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (3.3.4)\n",
      "Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (0.4.6)\n",
      "Requirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (57.4.0)\n",
      "Requirement already satisfied: tensorboard-data-server<0.7.0,>=0.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (0.6.1)\n",
      "Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (1.8.0)\n",
      "Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.7/dist-packages (from tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (1.0.1)\n",
      "Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (4.7.2)\n",
      "Requirement already satisfied: cachetools<5.0,>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (4.2.4)\n",
      "Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (0.2.8)\n",
      "Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.7/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (1.3.0)\n",
      "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from markdown>=2.6.8->tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (4.8.1)\n",
      "Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /usr/local/lib/python3.7/dist-packages (from pyasn1-modules>=0.2.1->google-auth<2,>=1.6.3->tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (0.4.8)\n",
      "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->bert-score>=0.3.5->textattack[tensorflow]) (1.24.3)\n",
      "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->bert-score>=0.3.5->textattack[tensorflow]) (2.10)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->bert-score>=0.3.5->textattack[tensorflow]) (2021.5.30)\n",
      "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests->bert-score>=0.3.5->textattack[tensorflow]) (3.0.4)\n",
      "Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (3.1.1)\n",
      "Collecting sacremoses\n",
      "  Downloading sacremoses-0.0.46-py3-none-any.whl (895 kB)\n",
      "\u001b[K     |████████████████████████████████| 895 kB 38.8 MB/s \n",
      "\u001b[?25hCollecting pyyaml>=5.1\n",
      "  Downloading PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl (636 kB)\n",
      "\u001b[K     |████████████████████████████████| 636 kB 30.6 MB/s \n",
      "\u001b[?25hRequirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.7/dist-packages (from transformers>=3.3.0->textattack[tensorflow]) (2019.12.20)\n",
      "Collecting huggingface-hub>=0.0.17\n",
      "  Downloading huggingface_hub-0.0.19-py3-none-any.whl (56 kB)\n",
      "\u001b[K     |████████████████████████████████| 56 kB 5.2 MB/s \n",
      "\u001b[?25hCollecting tokenizers<0.11,>=0.10.1\n",
      "  Downloading tokenizers-0.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl (3.3 MB)\n",
      "\u001b[K     |████████████████████████████████| 3.3 MB 39.6 MB/s \n",
      "\u001b[?25hCollecting xxhash\n",
      "  Downloading xxhash-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl (243 kB)\n",
      "\u001b[K     |████████████████████████████████| 243 kB 54.2 MB/s \n",
      "\u001b[?25hCollecting aiohttp\n",
      "  Downloading aiohttp-3.7.4.post0-cp37-cp37m-manylinux2014_x86_64.whl (1.3 MB)\n",
      "\u001b[K     |████████████████████████████████| 1.3 MB 49.0 MB/s \n",
      "\u001b[?25hRequirement already satisfied: multiprocess in /usr/local/lib/python3.7/dist-packages (from datasets->textattack[tensorflow]) (0.70.12.2)\n",
      "Collecting fsspec[http]>=2021.05.0\n",
      "  Downloading fsspec-2021.10.0-py3-none-any.whl (125 kB)\n",
      "\u001b[K     |████████████████████████████████| 125 kB 44.3 MB/s \n",
      "\u001b[?25hRequirement already satisfied: dill in /usr/local/lib/python3.7/dist-packages (from datasets->textattack[tensorflow]) (0.3.4)\n",
      "Collecting datasets\n",
      "  Downloading datasets-1.12.1-py3-none-any.whl (270 kB)\n",
      "\u001b[K     |████████████████████████████████| 270 kB 52.8 MB/s \n",
      "\u001b[?25h  Downloading datasets-1.12.0-py3-none-any.whl (269 kB)\n",
      "\u001b[K     |████████████████████████████████| 269 kB 51.8 MB/s \n",
      "\u001b[?25h  Downloading datasets-1.11.0-py3-none-any.whl (264 kB)\n",
      "\u001b[K     |████████████████████████████████| 264 kB 50.7 MB/s \n",
      "\u001b[?25hRequirement already satisfied: pyarrow!=4.0.0,>=1.0.0 in /usr/local/lib/python3.7/dist-packages (from datasets->textattack[tensorflow]) (3.0.0)\n",
      "Collecting sqlitedict>=1.6.0\n",
      "  Downloading sqlitedict-1.7.0.tar.gz (28 kB)\n",
      "Collecting more-itertools\n",
      "  Downloading more_itertools-8.8.0-py3-none-any.whl (48 kB)\n",
      "\u001b[K     |████████████████████████████████| 48 kB 5.4 MB/s \n",
      "\u001b[?25hCollecting conllu>=4.0\n",
      "  Downloading conllu-4.4.1-py2.py3-none-any.whl (15 kB)\n",
      "Requirement already satisfied: scikit-learn>=0.21.3 in /usr/local/lib/python3.7/dist-packages (from flair->textattack[tensorflow]) (0.22.2.post1)\n",
      "Collecting segtok>=1.5.7\n",
      "  Downloading segtok-1.5.10.tar.gz (25 kB)\n",
      "Collecting langdetect\n",
      "  Downloading langdetect-1.0.9.tar.gz (981 kB)\n",
      "\u001b[K     |████████████████████████████████| 981 kB 30.2 MB/s \n",
      "\u001b[?25hRequirement already satisfied: gensim<=3.8.3,>=3.4.0 in /usr/local/lib/python3.7/dist-packages (from flair->textattack[tensorflow]) (3.6.0)\n",
      "Collecting sentencepiece==0.1.95\n",
      "  Downloading sentencepiece-0.1.95-cp37-cp37m-manylinux2014_x86_64.whl (1.2 MB)\n",
      "\u001b[K     |████████████████████████████████| 1.2 MB 39.0 MB/s \n",
      "\u001b[?25hCollecting gdown==3.12.2\n",
      "  Downloading gdown-3.12.2.tar.gz (8.2 kB)\n",
      "  Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n",
      "  Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n",
      "    Preparing wheel metadata ... \u001b[?25l\u001b[?25hdone\n",
      "Collecting mpld3==0.3\n",
      "  Downloading mpld3-0.3.tar.gz (788 kB)\n",
      "\u001b[K     |████████████████████████████████| 788 kB 33.7 MB/s \n",
      "\u001b[?25hCollecting bpemb>=0.3.2\n",
      "  Downloading bpemb-0.3.3-py3-none-any.whl (19 kB)\n",
      "Collecting janome\n",
      "  Downloading Janome-0.4.1-py2.py3-none-any.whl (19.7 MB)\n",
      "\u001b[K     |████████████████████████████████| 19.7 MB 50 kB/s \n",
      "\u001b[?25hRequirement already satisfied: tabulate in /usr/local/lib/python3.7/dist-packages (from flair->textattack[tensorflow]) (0.8.9)\n",
      "Requirement already satisfied: hyperopt>=0.1.1 in /usr/local/lib/python3.7/dist-packages (from flair->textattack[tensorflow]) (0.1.2)\n",
      "Collecting ftfy\n",
      "  Downloading ftfy-6.0.3.tar.gz (64 kB)\n",
      "\u001b[K     |████████████████████████████████| 64 kB 2.9 MB/s \n",
      "\u001b[?25hCollecting deprecated>=1.2.4\n",
      "  Downloading Deprecated-1.2.13-py2.py3-none-any.whl (9.6 kB)\n",
      "Collecting konoha<5.0.0,>=4.0.0\n",
      "  Downloading konoha-4.6.5-py3-none-any.whl (20 kB)\n",
      "Collecting wikipedia-api\n",
      "  Downloading Wikipedia-API-0.5.4.tar.gz (18 kB)\n",
      "Requirement already satisfied: lxml in /usr/local/lib/python3.7/dist-packages (from flair->textattack[tensorflow]) (4.2.6)\n",
      "Requirement already satisfied: smart-open>=1.2.1 in /usr/local/lib/python3.7/dist-packages (from gensim<=3.8.3,>=3.4.0->flair->textattack[tensorflow]) (5.2.1)\n",
      "Requirement already satisfied: pymongo in /usr/local/lib/python3.7/dist-packages (from hyperopt>=0.1.1->flair->textattack[tensorflow]) (3.12.0)\n",
      "Requirement already satisfied: future in /usr/local/lib/python3.7/dist-packages (from hyperopt>=0.1.1->flair->textattack[tensorflow]) (0.16.0)\n",
      "Requirement already satisfied: networkx in /usr/local/lib/python3.7/dist-packages (from hyperopt>=0.1.1->flair->textattack[tensorflow]) (2.6.3)\n",
      "Collecting importlib-metadata\n",
      "  Downloading importlib_metadata-3.10.1-py3-none-any.whl (14 kB)\n",
      "Collecting requests\n",
      "  Downloading requests-2.26.0-py2.py3-none-any.whl (62 kB)\n",
      "\u001b[K     |████████████████████████████████| 62 kB 851 kB/s \n",
      "\u001b[?25hCollecting overrides<4.0.0,>=3.0.0\n",
      "  Downloading overrides-3.1.0.tar.gz (11 kB)\n",
      "Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata->markdown>=2.6.8->tensorboard~=2.6->tensorflow>=2->textattack[tensorflow]) (3.6.0)\n",
      "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib->bert-score>=0.3.5->textattack[tensorflow]) (0.10.0)\n",
      "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib->bert-score>=0.3.5->textattack[tensorflow]) (1.3.2)\n",
      "Requirement already satisfied: charset-normalizer~=2.0.0 in /usr/local/lib/python3.7/dist-packages (from requests->bert-score>=0.3.5->textattack[tensorflow]) (2.0.6)\n",
      "Requirement already satisfied: joblib>=0.11 in /usr/local/lib/python3.7/dist-packages (from scikit-learn>=0.21.3->flair->textattack[tensorflow]) (1.0.1)\n",
      "Requirement already satisfied: wcwidth in /usr/local/lib/python3.7/dist-packages (from ftfy->flair->textattack[tensorflow]) (0.2.5)\n",
      "Requirement already satisfied: docopt>=0.6.2 in /usr/local/lib/python3.7/dist-packages (from num2words->textattack[tensorflow]) (0.6.2)\n",
      "Requirement already satisfied: click in /usr/local/lib/python3.7/dist-packages (from sacremoses->transformers>=3.3.0->textattack[tensorflow]) (7.1.2)\n",
      "Building wheels for collected packages: gdown, mpld3, overrides, segtok, sqlitedict, ftfy, langdetect, lru-dict, terminaltables, wikipedia-api, word2number\n",
      "  Building wheel for gdown (PEP 517) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for gdown: filename=gdown-3.12.2-py3-none-any.whl size=9704 sha256=be76d15e41eb103e46e0ba0d72cc277d90b7699bae860782279f5ee6ed86247b\n",
      "  Stored in directory: /root/.cache/pip/wheels/ba/e0/7e/726e872a53f7358b4b96a9975b04e98113b005cd8609a63abc\n",
      "  Building wheel for mpld3 (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for mpld3: filename=mpld3-0.3-py3-none-any.whl size=116702 sha256=729d5e596c97fd6b6855617ae575e2f299e4c3883055d82c82cd0e9f6dceebb2\n",
      "  Stored in directory: /root/.cache/pip/wheels/26/70/6a/1c79e59951a41b4045497da187b2724f5659ca64033cf4548e\n",
      "  Building wheel for overrides (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for overrides: filename=overrides-3.1.0-py3-none-any.whl size=10186 sha256=7dfa089a9708c2250125f86ed7e62bb443bdbf3d555ba72acf5e94c175dbdde6\n",
      "  Stored in directory: /root/.cache/pip/wheels/3a/0d/38/01a9bc6e20dcfaf0a6a7b552d03137558ba1c38aea47644682\n",
      "  Building wheel for segtok (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for segtok: filename=segtok-1.5.10-py3-none-any.whl size=25030 sha256=e77700c3b756745ae1e92f298016bf2f0b539bdfd6b5a90ee918e18139587df2\n",
      "  Stored in directory: /root/.cache/pip/wheels/67/b7/d0/a121106e61339eee5ed083bc230b1c8dc422c49a5a28c2addd\n",
      "  Building wheel for sqlitedict (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for sqlitedict: filename=sqlitedict-1.7.0-py3-none-any.whl size=14392 sha256=259e857c9f3cfc72c0c8b50d931b88b79d4f5382b8a70275d203a89427ffbb14\n",
      "  Stored in directory: /root/.cache/pip/wheels/af/94/06/18c0e83e9e227da8f3582810b51f319bbfd181e508676a56c8\n",
      "  Building wheel for ftfy (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for ftfy: filename=ftfy-6.0.3-py3-none-any.whl size=41933 sha256=c66614666a8be4942d16fab87ee11128fc03fa8424fc0a12c0f3600801fa687e\n",
      "  Stored in directory: /root/.cache/pip/wheels/19/f5/38/273eb3b5e76dfd850619312f693716ac4518b498f5ffb6f56d\n",
      "  Building wheel for langdetect (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for langdetect: filename=langdetect-1.0.9-py3-none-any.whl size=993242 sha256=553fa791a31538b8828322bde06203789fc30bbbdf4bb0c80a7d7003632aa0ae\n",
      "  Stored in directory: /root/.cache/pip/wheels/c5/96/8a/f90c59ed25d75e50a8c10a1b1c2d4c402e4dacfa87f3aff36a\n",
      "  Building wheel for lru-dict (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for lru-dict: filename=lru_dict-1.1.7-cp37-cp37m-linux_x86_64.whl size=28412 sha256=aef5a191c8f01db62a275cc05e3befaee153aaaabd41f41aa2d724b4678b60ec\n",
      "  Stored in directory: /root/.cache/pip/wheels/9d/0b/4e/aa8fec9833090cd52bcd76f92f9d95e1ee7b915c12093663b4\n",
      "  Building wheel for terminaltables (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for terminaltables: filename=terminaltables-3.1.0-py3-none-any.whl size=15354 sha256=03639ca3ebb53caeaa19ad643c2149cb1884638f368b72fd18e53b90a7d52d1b\n",
      "  Stored in directory: /root/.cache/pip/wheels/ba/ad/c8/2d98360791161cd3db6daf6b5e730f34021fc9367d5879f497\n",
      "  Building wheel for wikipedia-api (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for wikipedia-api: filename=Wikipedia_API-0.5.4-py3-none-any.whl size=13475 sha256=a8fb9efb0f94278251a1983fbd3d07e4fff610ef8ea1205bb2433a4866b79b15\n",
      "  Stored in directory: /root/.cache/pip/wheels/d3/24/56/58ba93cf78be162451144e7a9889603f437976ef1ae7013d04\n",
      "  Building wheel for word2number (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
      "  Created wheel for word2number: filename=word2number-1.1-py3-none-any.whl size=5580 sha256=cee3a246b5d687cb221e19e789bdcc409ea1f97d9c59d66a6a205c0f3c62457d\n",
      "  Stored in directory: /root/.cache/pip/wheels/4b/c3/77/a5f48aeb0d3efb7cd5ad61cbd3da30bbf9ffc9662b07c9f879\n",
      "Successfully built gdown mpld3 overrides segtok sqlitedict ftfy langdetect lru-dict terminaltables wikipedia-api word2number\n",
      "Installing collected packages: requests, tqdm, pyyaml, importlib-metadata, tokenizers, sentencepiece, sacremoses, overrides, huggingface-hub, xxhash, wikipedia-api, transformers, sqlitedict, segtok, mpld3, more-itertools, langdetect, konoha, janome, gdown, ftfy, fsspec, deprecated, conllu, bpemb, word2number, terminaltables, num2words, lru-dict, lemminflect, language-tool-python, flair, datasets, bert-score, textattack, tensorflow-text, tensorboardX\n",
      "  Attempting uninstall: requests\n",
      "    Found existing installation: requests 2.23.0\n",
      "    Uninstalling requests-2.23.0:\n",
      "      Successfully uninstalled requests-2.23.0\n",
      "  Attempting uninstall: tqdm\n",
      "    Found existing installation: tqdm 4.62.3\n",
      "    Uninstalling tqdm-4.62.3:\n",
      "      Successfully uninstalled tqdm-4.62.3\n",
      "  Attempting uninstall: pyyaml\n",
      "    Found existing installation: PyYAML 3.13\n",
      "    Uninstalling PyYAML-3.13:\n",
      "      Successfully uninstalled PyYAML-3.13\n",
      "  Attempting uninstall: importlib-metadata\n",
      "    Found existing installation: importlib-metadata 4.8.1\n",
      "    Uninstalling importlib-metadata-4.8.1:\n",
      "      Successfully uninstalled importlib-metadata-4.8.1\n",
      "  Attempting uninstall: more-itertools\n",
      "    Found existing installation: more-itertools 8.10.0\n",
      "    Uninstalling more-itertools-8.10.0:\n",
      "      Successfully uninstalled more-itertools-8.10.0\n",
      "  Attempting uninstall: gdown\n",
      "    Found existing installation: gdown 3.6.4\n",
      "    Uninstalling gdown-3.6.4:\n",
      "      Successfully uninstalled gdown-3.6.4\n",
      "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
      "google-colab 1.0.0 requires requests~=2.23.0, but you have requests 2.26.0 which is incompatible.\n",
      "datascience 0.10.6 requires folium==0.2.1, but you have folium 0.8.3 which is incompatible.\u001b[0m\n",
      "Successfully installed bert-score-0.3.10 bpemb-0.3.3 conllu-4.4.1 datasets-1.11.0 deprecated-1.2.13 flair-0.9 fsspec-2021.10.0 ftfy-6.0.3 gdown-3.12.2 huggingface-hub-0.0.19 importlib-metadata-3.10.1 janome-0.4.1 konoha-4.6.5 langdetect-1.0.9 language-tool-python-2.6.1 lemminflect-0.2.2 lru-dict-1.1.7 more-itertools-8.8.0 mpld3-0.3 num2words-0.5.10 overrides-3.1.0 pyyaml-5.4.1 requests-2.26.0 sacremoses-0.0.46 segtok-1.5.10 sentencepiece-0.1.95 sqlitedict-1.7.0 tensorboardX-2.4 tensorflow-text-2.6.0 terminaltables-3.1.0 textattack-0.3.3 tokenizers-0.10.3 tqdm-4.49.0 transformers-4.11.3 wikipedia-api-0.5.4 word2number-1.1 xxhash-2.0.2\n"
     ]
    }
   ],
   "source": [
    "!pip3 install textattack[tensorflow]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "ONayD5EJseoG"
   },
   "source": [
    "## Training\n",
    "\n",
    "First, we're going to train a model. TextAttack integrates directly with [transformers](https://github.com/huggingface/transformers/) and [datasets](https://github.com/huggingface/datasets) to train any of the `transformers` pre-trained models on datasets from `datasets`. \n",
    "\n",
     "Let's use the Rotten Tomatoes Movie Review dataset: it's relatively short, and showcases the key features of `textattack train`. Let's take a look at the dataset using `textattack peek-dataset`:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "spS2eW5WseoG",
    "outputId": "795de4af-18b2-4750-d817-a03959c4cd25"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[34;1mtextattack\u001b[0m: Updating TextAttack package dependencies.\n",
      "\u001b[34;1mtextattack\u001b[0m: Downloading NLTK required packages.\n",
      "[nltk_data] Downloading package averaged_perceptron_tagger to\n",
      "[nltk_data]     /root/nltk_data...\n",
      "[nltk_data]   Unzipping taggers/averaged_perceptron_tagger.zip.\n",
      "[nltk_data] Downloading package stopwords to /root/nltk_data...\n",
      "[nltk_data]   Unzipping corpora/stopwords.zip.\n",
      "[nltk_data] Downloading package omw to /root/nltk_data...\n",
      "[nltk_data]   Unzipping corpora/omw.zip.\n",
      "[nltk_data] Downloading package universal_tagset to /root/nltk_data...\n",
      "[nltk_data]   Unzipping taggers/universal_tagset.zip.\n",
      "[nltk_data] Downloading package wordnet to /root/nltk_data...\n",
      "[nltk_data]   Unzipping corpora/wordnet.zip.\n",
      "[nltk_data] Downloading package punkt to /root/nltk_data...\n",
      "[nltk_data]   Unzipping tokenizers/punkt.zip.\n",
      "\u001b[34;1mtextattack\u001b[0m: Downloading https://textattack.s3.amazonaws.com/word_embeddings/paragramcf.\n",
      "100% 481M/481M [00:39<00:00, 12.3MB/s]\n",
      "\u001b[34;1mtextattack\u001b[0m: Unzipping file /root/.cache/textattack/tmpc6pdkqvf.zip to /root/.cache/textattack/word_embeddings/paragramcf.\n",
      "\u001b[34;1mtextattack\u001b[0m: Successfully saved word_embeddings/paragramcf to cache.\n",
      "Downloading: 5.11kB [00:00, 4.99MB/s]       \n",
      "Downloading: 2.02kB [00:00, 2.04MB/s]     \n",
      "Using custom data configuration default\n",
      "Downloading and preparing dataset rotten_tomatoes_movie_review/default (download: 476.34 KiB, generated: 1.28 MiB, post-processed: Unknown size, total: 1.75 MiB) to /root/.cache/huggingface/datasets/rotten_tomatoes_movie_review/default/1.0.0/e06abb624abab47e1a64608fdfe65a913f5a68c66118408032644a3285208fb5...\n",
      "Downloading: 100% 488k/488k [00:00<00:00, 28.3MB/s]\n",
      "Dataset rotten_tomatoes_movie_review downloaded and prepared to /root/.cache/huggingface/datasets/rotten_tomatoes_movie_review/default/1.0.0/e06abb624abab47e1a64608fdfe65a913f5a68c66118408032644a3285208fb5. Subsequent calls will reuse this data.\n",
      "\u001b[34;1mtextattack\u001b[0m: Loading \u001b[94mdatasets\u001b[0m dataset \u001b[94mrotten_tomatoes\u001b[0m, split \u001b[94mtrain\u001b[0m.\n",
      "\u001b[34;1mtextattack\u001b[0m: Number of samples: \u001b[94m8530\u001b[0m\n",
      "\u001b[34;1mtextattack\u001b[0m: Number of words per input:\n",
      "\u001b[34;1mtextattack\u001b[0m: \ttotal:   \u001b[94m157755\u001b[0m\n",
      "\u001b[34;1mtextattack\u001b[0m: \tmean:    \u001b[94m18.49\u001b[0m\n",
      "\u001b[34;1mtextattack\u001b[0m: \tstd:     \u001b[94m8.58\u001b[0m\n",
      "\u001b[34;1mtextattack\u001b[0m: \tmin:     \u001b[94m1\u001b[0m\n",
      "\u001b[34;1mtextattack\u001b[0m: \tmax:     \u001b[94m51\u001b[0m\n",
      "\u001b[34;1mtextattack\u001b[0m: Dataset lowercased: \u001b[94mTrue\u001b[0m\n",
      "\u001b[34;1mtextattack\u001b[0m: First sample:\n",
      "the rock is destined to be the 21st century's new \" conan \" and that he's going to make a splash even greater than arnold schwarzenegger , jean-claud van damme or steven segal . \n",
      "\n",
      "\u001b[34;1mtextattack\u001b[0m: Last sample:\n",
      "things really get weird , though not particularly scary : the movie is all portent and no content . \n",
      "\n",
      "\u001b[34;1mtextattack\u001b[0m: Found 2 distinct outputs.\n",
      "\u001b[34;1mtextattack\u001b[0m: Most common outputs:\n",
      "\t 1      (4265)\n",
      "\t 0      (4265)\n"
     ]
    }
   ],
   "source": [
    "!textattack peek-dataset --dataset-from-huggingface rotten_tomatoes"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "uguqpjnLseoI"
   },
   "source": [
    "The dataset looks good! It's lowercased already, so we'll make sure our model is uncased. The longest input is 51 words, so we can cap our maximum sequence length (`--model-max-length`) at 64.\n",
    "\n",
    "We'll train [`distilbert-base-uncased`](https://huggingface.co/transformers/model_doc/distilbert.html), since it's a relatively small model, and a good example of how we integrate with `transformers`.\n",
    "\n",
    "So we have our command:\n",
    "\n",
    "```bash\n",
    "textattack train                      \\ # Train a model with TextAttack\n",
    "    --model distilbert-base-uncased   \\ # Using distilbert, uncased version, from `transformers`\n",
    "    --dataset rotten_tomatoes         \\ # On the Rotten Tomatoes dataset\n",
    "    --model-num-labels 2              \\ # That has 2 labels\n",
    "    --model-max-length 64             \\ # With a maximum sequence length of 64\n",
    "    --per-device-train-batch-size 128 \\ # And batch size of 128\n",
    "    --num-epochs 3                    \\ # For 3 epochs \n",
    "```\n",
    "\n",
    "Now let's run it (please remember to use GPU if you have access):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "BY33W9aWseoI",
    "outputId": "0b0ec80a-6cec-4113-8474-b5bd78651b6c"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[34;1mtextattack\u001b[0m: Loading transformers AutoModelForSequenceClassification: distilbert-base-uncased\n",
      "Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_projector.bias', 'vocab_projector.weight', 'vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_layer_norm.bias']\n",
      "- This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n",
      "Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.weight', 'pre_classifier.bias', 'classifier.bias', 'pre_classifier.weight']\n",
      "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n",
      "Using custom data configuration default\n",
      "Reusing dataset rotten_tomatoes_movie_review (/root/.cache/huggingface/datasets/rotten_tomatoes_movie_review/default/1.0.0/e06abb624abab47e1a64608fdfe65a913f5a68c66118408032644a3285208fb5)\n",
      "\u001b[34;1mtextattack\u001b[0m: Loading \u001b[94mdatasets\u001b[0m dataset \u001b[94mrotten_tomatoes\u001b[0m, split \u001b[94mtrain\u001b[0m.\n",
      "Using custom data configuration default\n",
      "Reusing dataset rotten_tomatoes_movie_review (/root/.cache/huggingface/datasets/rotten_tomatoes_movie_review/default/1.0.0/e06abb624abab47e1a64608fdfe65a913f5a68c66118408032644a3285208fb5)\n",
      "Using custom data configuration default\n",
      "Reusing dataset rotten_tomatoes_movie_review (/root/.cache/huggingface/datasets/rotten_tomatoes_movie_review/default/1.0.0/e06abb624abab47e1a64608fdfe65a913f5a68c66118408032644a3285208fb5)\n",
      "Using custom data configuration default\n",
      "Reusing dataset rotten_tomatoes_movie_review (/root/.cache/huggingface/datasets/rotten_tomatoes_movie_review/default/1.0.0/e06abb624abab47e1a64608fdfe65a913f5a68c66118408032644a3285208fb5)\n",
      "\u001b[34;1mtextattack\u001b[0m: Loading \u001b[94mdatasets\u001b[0m dataset \u001b[94mrotten_tomatoes\u001b[0m, split \u001b[94mvalidation\u001b[0m.\n",
      "\u001b[34;1mtextattack\u001b[0m: Writing logs to ./outputs/2021-10-13-17-37-27-247436/train_log.txt.\n",
      "\u001b[34;1mtextattack\u001b[0m: Wrote original training args to ./outputs/2021-10-13-17-37-27-247436/training_args.json.\n",
      "\u001b[34;1mtextattack\u001b[0m: ***** Running training *****\n",
      "\u001b[34;1mtextattack\u001b[0m:   Num examples = 8530\n",
      "\u001b[34;1mtextattack\u001b[0m:   Num epochs = 3\n",
      "\u001b[34;1mtextattack\u001b[0m:   Num clean epochs = 3\n",
      "\u001b[34;1mtextattack\u001b[0m:   Instantaneous batch size per device = 128\n",
      "\u001b[34;1mtextattack\u001b[0m:   Total train batch size (w. parallel, distributed & accumulation) = 128\n",
      "\u001b[34;1mtextattack\u001b[0m:   Gradient accumulation steps = 1\n",
      "\u001b[34;1mtextattack\u001b[0m:   Total optimization steps = 201\n",
      "\u001b[34;1mtextattack\u001b[0m: ==========================================================\n",
      "\u001b[34;1mtextattack\u001b[0m: Epoch 1\n",
      "\u001b[34;1mtextattack\u001b[0m: Running clean epoch 1/3\n",
      "Loss 0.68924: 100% 67/67 [01:16<00:00,  1.14s/it]\n",
      "\u001b[34;1mtextattack\u001b[0m: Train accuracy: 52.86%\n",
      "\u001b[34;1mtextattack\u001b[0m: Eval accuracy: 70.83%\n",
      "\u001b[34;1mtextattack\u001b[0m: Best score found. Saved model to ./outputs/2021-10-13-17-37-27-247436/best_model/\n",
      "\u001b[34;1mtextattack\u001b[0m: ==========================================================\n",
      "\u001b[34;1mtextattack\u001b[0m: Epoch 2\n",
      "\u001b[34;1mtextattack\u001b[0m: Running clean epoch 2/3\n",
      "Loss 0.59931: 100% 67/67 [01:16<00:00,  1.13s/it]\n",
      "\u001b[34;1mtextattack\u001b[0m: Train accuracy: 77.07%\n",
      "\u001b[34;1mtextattack\u001b[0m: Eval accuracy: 82.83%\n",
      "\u001b[34;1mtextattack\u001b[0m: Best score found. Saved model to ./outputs/2021-10-13-17-37-27-247436/best_model/\n",
      "\u001b[34;1mtextattack\u001b[0m: ==========================================================\n",
      "\u001b[34;1mtextattack\u001b[0m: Epoch 3\n",
      "\u001b[34;1mtextattack\u001b[0m: Running clean epoch 3/3\n",
      "Loss 0.51365: 100% 67/67 [01:16<00:00,  1.14s/it]\n",
      "\u001b[34;1mtextattack\u001b[0m: Train accuracy: 85.67%\n",
      "\u001b[34;1mtextattack\u001b[0m: Eval accuracy: 84.80%\n",
      "\u001b[34;1mtextattack\u001b[0m: Best score found. Saved model to ./outputs/2021-10-13-17-37-27-247436/best_model/\n",
      "\u001b[34;1mtextattack\u001b[0m: Wrote README to ./outputs/2021-10-13-17-37-27-247436/README.md.\n"
     ]
    }
   ],
   "source": [
    "!textattack train --model-name-or-path distilbert-base-uncased --dataset rotten_tomatoes --model-num-labels 2 --model-max-length 64 --per-device-train-batch-size 128 --num-epochs 3"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "4xzv3BGLseoI"
   },
   "source": [
    "## Evaluation\n",
    "\n",
    "We successfully fine-tuned `distilbert-base-uncased` for 3 epochs. Now let's evaluate it using `textattack eval`. This is as simple as providing the path to the pretrained model (that you just obtain from running the above command!) to `--model`, along with the number of evaluation samples. `textattack eval` will automatically load the evaluation data from training:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "qGYR_W6DseoJ",
    "outputId": "a4edf6d3-9ac5-4513-ea26-754b409d5847"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Using custom data configuration default\n",
      "Reusing dataset rotten_tomatoes_movie_review (/root/.cache/huggingface/datasets/rotten_tomatoes_movie_review/default/1.0.0/e06abb624abab47e1a64608fdfe65a913f5a68c66118408032644a3285208fb5)\n",
      "\u001b[34;1mtextattack\u001b[0m: Loading \u001b[94mdatasets\u001b[0m dataset \u001b[94mrotten_tomatoes\u001b[0m, split \u001b[94mtest\u001b[0m.\n",
      "\u001b[34;1mtextattack\u001b[0m: Got 1000 predictions.\n",
      "\u001b[34;1mtextattack\u001b[0m: Correct 847/1000 (\u001b[94m84.70%\u001b[0m)\n"
     ]
    }
   ],
   "source": [
    "!textattack eval --num-examples 1000 --model ./outputs/2021-10-13-17-37-27-247436/best_model/ --dataset-from-huggingface rotten_tomatoes --dataset-split test"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "sFPkCZShseoJ"
   },
   "source": [
    "Awesome -- we were able to train a model up to 84.7% accuracy on the test dataset – with only a single command!"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "wWglEuvUseoK"
   },
   "source": [
    "## Attack\n",
    "\n",
    "Finally, let's attack our pre-trained model. We can do this the same way as before (by providing the path to the pretrained model to `--model`). For our attack, let's use the \"TextFooler\" attack recipe, from the paper [\"Is BERT Really Robust? A Strong Baseline for Natural Language Attack on Text Classification and Entailment\" (Jin et al, 2019)](https://arxiv.org/abs/1907.11932). We can do this by passing `--recipe textfooler` to `textattack attack`.\n",
    "\n",
    "> *Warning*: We're printing out 100 examples and, if the attack succeeds, their perturbations. The output of this command is going to be quite long!\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "vL-Bo1bgseoK",
    "outputId": "aad8a4f1-bda7-4687-c79c-736201a29261"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Using custom data configuration default\n",
      "Reusing dataset rotten_tomatoes_movie_review (/root/.cache/huggingface/datasets/rotten_tomatoes_movie_review/default/1.0.0/e06abb624abab47e1a64608fdfe65a913f5a68c66118408032644a3285208fb5)\n",
      "\u001b[34;1mtextattack\u001b[0m: Loading \u001b[94mdatasets\u001b[0m dataset \u001b[94mrotten_tomatoes\u001b[0m, split \u001b[94mtest\u001b[0m.\n",
      "\u001b[34;1mtextattack\u001b[0m: Unknown if model of class <class 'transformers.models.distilbert.modeling_distilbert.DistilBertForSequenceClassification'> compatible with goal function <class 'textattack.goal_functions.classification.untargeted_classification.UntargetedClassification'>.\n",
      "Attack(\n",
      "  (search_method): GreedyWordSwapWIR(\n",
      "    (wir_method):  delete\n",
      "  )\n",
      "  (goal_function):  UntargetedClassification\n",
      "  (transformation):  WordSwapEmbedding(\n",
      "    (max_candidates):  50\n",
      "    (embedding):  WordEmbedding\n",
      "  )\n",
      "  (constraints): \n",
      "    (0): WordEmbeddingDistance(\n",
      "        (embedding):  WordEmbedding\n",
      "        (min_cos_sim):  0.5\n",
      "        (cased):  False\n",
      "        (include_unknown_words):  True\n",
      "        (compare_against_original):  True\n",
      "      )\n",
      "    (1): PartOfSpeech(\n",
      "        (tagger_type):  nltk\n",
      "        (tagset):  universal\n",
      "        (allow_verb_noun_swap):  True\n",
      "        (compare_against_original):  True\n",
      "      )\n",
      "    (2): UniversalSentenceEncoder(\n",
      "        (metric):  angular\n",
      "        (threshold):  0.840845057\n",
      "        (window_size):  15\n",
      "        (skip_text_shorter_than_window):  True\n",
      "        (compare_against_original):  False\n",
      "      )\n",
      "    (3): RepeatModification\n",
      "    (4): StopwordModification\n",
      "    (5): InputColumnModification(\n",
      "        (matching_column_labels):  ['premise', 'hypothesis']\n",
      "        (columns_to_ignore):  {'premise'}\n",
      "      )\n",
      "  (is_black_box):  True\n",
      ") \n",
      "\n",
      "  0% 0/100 [00:00<?, ?it/s]Using /tmp/tfhub_modules to cache modules.\n",
      "Downloading TF-Hub Module 'https://tfhub.dev/google/universal-sentence-encoder/4'.\n",
      "Downloaded https://tfhub.dev/google/universal-sentence-encoder/4, Total size: 987.47MB\n",
      "Downloaded TF-Hub Module 'https://tfhub.dev/google/universal-sentence-encoder/4'.\n",
      "2021-10-13 18:00:34.997360: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
      "2021-10-13 18:00:34.998552: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
      "2021-10-13 18:00:34.999292: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
      "2021-10-13 18:00:35.002729: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
      "2021-10-13 18:00:35.003549: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
      "2021-10-13 18:00:35.004300: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
      "2021-10-13 18:00:35.012931: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
      "2021-10-13 18:00:35.013723: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
      "2021-10-13 18:00:35.014528: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero\n",
      "2021-10-13 18:00:35.015277: W tensorflow/core/common_runtime/gpu/gpu_bfc_allocator.cc:39] Overriding allow_growth setting because the TF_FORCE_GPU_ALLOW_GROWTH environment variable is set. Original config value was 0.\n",
      "2021-10-13 18:00:35.015394: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1510] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 10020 MB memory:  -> device: 0, name: Tesla K80, pci bus id: 0000:00:04.0, compute capability: 3.7\n",
      "2021-10-13 18:00:36.592844: W tensorflow/core/framework/cpu_allocator_impl.cc:80] Allocation of 34133760 exceeds 10% of free system memory.\n",
      "2021-10-13 18:00:36.609893: W tensorflow/core/framework/cpu_allocator_impl.cc:80] Allocation of 34133760 exceeds 10% of free system memory.\n",
      "2021-10-13 18:00:36.627953: W tensorflow/core/framework/cpu_allocator_impl.cc:80] Allocation of 34133760 exceeds 10% of free system memory.\n",
      "2021-10-13 18:00:36.688755: W tensorflow/core/framework/cpu_allocator_impl.cc:80] Allocation of 34133760 exceeds 10% of free system memory.\n",
      "2021-10-13 18:00:36.717187: W tensorflow/core/framework/cpu_allocator_impl.cc:80] Allocation of 34133760 exceeds 10% of free system memory.\n",
      "2021-10-13 18:00:38.127267: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:185] None of the MLIR Optimization Passes are enabled (registered 2)\n",
      "  1% 1/100 [00:22<36:37, 22.19s/it]--------------------------------------------- Result 1 ---------------------------------------------\n",
      "\u001b[92mPositive (95%)\u001b[0m --> \u001b[91mNegative (61%)\u001b[0m\n",
      "\n",
      "lovingly photographed in the manner of a golden book sprung to life , stuart little 2 \u001b[92mmanages\u001b[0m \u001b[92msweetness\u001b[0m largely without stickiness .\n",
      "\n",
      "lovingly photographed in the manner of a golden book sprung to life , stuart little 2 \u001b[91madministration\u001b[0m \u001b[91mhoneyed\u001b[0m largely without stickiness .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 1 / 0 / 0 / 1:   2% 2/100 [00:22<18:20, 11.23s/it]--------------------------------------------- Result 2 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (52%)\u001b[0m\n",
      "\n",
      "\u001b[92mconsistently\u001b[0m clever and \u001b[92msuspenseful\u001b[0m .\n",
      "\n",
      "\u001b[91mprogressively\u001b[0m clever and \u001b[91menigmatic\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 2 / 0 / 0 / 2:   2% 2/100 [00:22<18:20, 11.23s/it]--------------------------------------------- Result 3 ---------------------------------------------\n",
      "\u001b[91mNegative (85%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "it's like a \" big chill \" reunion of the baader-meinhof gang , only these guys are more harmless pranksters than political activists .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 2 / 0 / 1 / 3:   4% 4/100 [00:22<09:10,  5.73s/it]--------------------------------------------- Result 4 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (59%)\u001b[0m\n",
      "\n",
      "the story gives ample opportunity for large-scale action and suspense , which director shekhar kapur supplies with \u001b[92mtremendous\u001b[0m \u001b[92mskill\u001b[0m .\n",
      "\n",
      "the story gives ample opportunity for large-scale action and suspense , which director shekhar kapur supplies with \u001b[91mstupendous\u001b[0m \u001b[91mskilful\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 3 / 0 / 1 / 4:   4% 4/100 [00:22<09:10,  5.73s/it]--------------------------------------------- Result 5 ---------------------------------------------\n",
      "\u001b[91mNegative (76%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "red dragon \" never cuts corners .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 3 / 0 / 2 / 5:   6% 6/100 [00:23<06:02,  3.86s/it]--------------------------------------------- Result 6 ---------------------------------------------\n",
      "\u001b[92mPositive (73%)\u001b[0m --> \u001b[91mNegative (61%)\u001b[0m\n",
      "\n",
      "fresnadillo has something serious to say about the \u001b[92mways\u001b[0m in which extravagant chance can distort our perspective and throw us off the path of good sense .\n",
      "\n",
      "fresnadillo has something serious to say about the \u001b[91mmodo\u001b[0m in which extravagant chance can distort our perspective and throw us off the path of good sense .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 4 / 0 / 2 / 6:   6% 6/100 [00:23<06:02,  3.86s/it]--------------------------------------------- Result 7 ---------------------------------------------\n",
      "\u001b[92mPositive (97%)\u001b[0m --> \u001b[91mNegative (70%)\u001b[0m\n",
      "\n",
      "throws in enough clever and \u001b[92munexpected\u001b[0m \u001b[92mtwists\u001b[0m to make the formula feel fresh .\n",
      "\n",
      "throws in enough clever and \u001b[91munwanted\u001b[0m \u001b[91mtendrils\u001b[0m to make the formula feel fresh .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 5 / 0 / 2 / 7:   8% 8/100 [00:23<04:29,  2.93s/it]--------------------------------------------- Result 8 ---------------------------------------------\n",
      "\u001b[91mNegative (81%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "weighty and ponderous but every bit as filling as the treat of the title .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 5 / 0 / 3 / 8:   8% 8/100 [00:23<04:29,  2.93s/it]--------------------------------------------- Result 9 ---------------------------------------------\n",
      "\u001b[92mPositive (97%)\u001b[0m --> \u001b[91mNegative (72%)\u001b[0m\n",
      "\n",
      "a \u001b[92mreal\u001b[0m audience-pleaser that will \u001b[92mstrike\u001b[0m a \u001b[92mchord\u001b[0m with anyone who's ever waited in a doctor's office , emergency room , hospital bed or insurance company office .\n",
      "\n",
      "a \u001b[91mactual\u001b[0m audience-pleaser that will \u001b[91mslugged\u001b[0m a \u001b[91mchords\u001b[0m with anyone who's ever waited in a doctor's office , emergency room , hospital bed or insurance company office .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 6 / 0 / 3 / 9:  10% 10/100 [00:24<03:37,  2.41s/it]--------------------------------------------- Result 10 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (91%)\u001b[0m\n",
      "\n",
      "generates an \u001b[92menormous\u001b[0m feeling of empathy for its characters .\n",
      "\n",
      "generates an \u001b[91mdreaded\u001b[0m feeling of empathy for its characters .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 7 / 0 / 3 / 10:  10% 10/100 [00:24<03:37,  2.41s/it]--------------------------------------------- Result 11 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (53%)\u001b[0m\n",
      "\n",
      "exposing the ways we fool ourselves is one hour photo's real \u001b[92mstrength\u001b[0m .\n",
      "\n",
      "exposing the ways we fool ourselves is one hour photo's real \u001b[91mstrenght\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 8 / 0 / 3 / 11:  12% 12/100 [00:24<02:58,  2.03s/it]--------------------------------------------- Result 12 ---------------------------------------------\n",
      "\u001b[91mNegative (59%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "it's up to you to decide whether to admire these people's dedication to their cause or be repelled by their dogmatism , manipulativeness and narrow , fearful view of american life .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 8 / 0 / 4 / 12:  12% 12/100 [00:24<02:58,  2.03s/it]--------------------------------------------- Result 13 ---------------------------------------------\n",
      "\u001b[91mNegative (80%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "mostly , [goldbacher] just lets her complicated characters be unruly , confusing and , through it all , human .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 8 / 0 / 5 / 13:  14% 14/100 [00:24<02:32,  1.77s/it]--------------------------------------------- Result 14 ---------------------------------------------\n",
      "\u001b[92mPositive (88%)\u001b[0m --> \u001b[91mNegative (96%)\u001b[0m\n",
      "\n",
      ". . . \u001b[92mquite\u001b[0m good at providing some \u001b[92mgood\u001b[0m old fashioned spooks .\n",
      "\n",
      ". . . \u001b[91mtoo\u001b[0m good at providing some \u001b[91mguten\u001b[0m old fashioned spooks .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 9 / 0 / 5 / 14:  14% 14/100 [00:24<02:32,  1.77s/it]--------------------------------------------- Result 15 ---------------------------------------------\n",
      "\u001b[91mNegative (91%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "at its worst , the movie is pretty diverting ; the pity is that it rarely achieves its best .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 9 / 0 / 6 / 15:  16% 16/100 [00:25<02:14,  1.60s/it]--------------------------------------------- Result 16 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (62%)\u001b[0m\n",
      "\n",
      "scherfig's light-hearted \u001b[92mprofile\u001b[0m of \u001b[92memotional\u001b[0m \u001b[92mdesperation\u001b[0m is achingly \u001b[92mhonest\u001b[0m and \u001b[92mdelightfully\u001b[0m cheeky .\n",
      "\n",
      "scherfig's light-hearted \u001b[91mcharacterize\u001b[0m of \u001b[91mpsychiatric\u001b[0m \u001b[91mdiscouragement\u001b[0m is achingly \u001b[91mcordial\u001b[0m and \u001b[91mblithely\u001b[0m cheeky .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 10 / 0 / 6 / 16:  16% 16/100 [00:25<02:14,  1.60s/it]--------------------------------------------- Result 17 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (74%)\u001b[0m\n",
      "\n",
      "a \u001b[92mjourney\u001b[0m \u001b[92mspanning\u001b[0m nearly three decades of bittersweet camaraderie and history , in which we feel that we truly know what makes holly and marina tick , and our \u001b[92mhearts\u001b[0m go out to them as both continue to negotiate their \u001b[92mimperfect\u001b[0m , love-hate relationship .\n",
      "\n",
      "a \u001b[91mtrekking\u001b[0m \u001b[91mexpectancy\u001b[0m nearly three decades of bittersweet camaraderie and history , in which we feel that we truly know what makes holly and marina tick , and our \u001b[91mcoeur\u001b[0m go out to them as both continue to negotiate their \u001b[91minadequate\u001b[0m , love-hate relationship .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 11 / 0 / 6 / 17:  18% 18/100 [00:26<02:02,  1.50s/it]--------------------------------------------- Result 18 ---------------------------------------------\n",
      "\u001b[92mPositive (97%)\u001b[0m --> \u001b[91mNegative (51%)\u001b[0m\n",
      "\n",
      "the \u001b[92mwonderfully\u001b[0m \u001b[92mlush\u001b[0m morvern callar is pure punk existentialism , and ms . ramsay and her co-writer , liana dognini , have dramatized the alan warner novel , which itself felt like an answer to irvine welsh's book trainspotting .\n",
      "\n",
      "the \u001b[91mappallingly\u001b[0m \u001b[91mimpeccably\u001b[0m morvern callar is pure punk existentialism , and ms . ramsay and her co-writer , liana dognini , have dramatized the alan warner novel , which itself felt like an answer to irvine welsh's book trainspotting .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 12 / 0 / 6 / 18:  18% 18/100 [00:26<02:02,  1.50s/it]--------------------------------------------- Result 19 ---------------------------------------------\n",
      "\u001b[92mPositive (64%)\u001b[0m --> \u001b[91mNegative (68%)\u001b[0m\n",
      "\n",
      "as it \u001b[92mturns\u001b[0m out , you can go \u001b[92mhome\u001b[0m again .\n",
      "\n",
      "as it \u001b[91mpivot\u001b[0m out , you can go \u001b[91mhomepage\u001b[0m again .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 13 / 0 / 6 / 19:  20% 20/100 [00:27<01:49,  1.37s/it]--------------------------------------------- Result 20 ---------------------------------------------\n",
      "\u001b[92mPositive (95%)\u001b[0m --> \u001b[91mNegative (78%)\u001b[0m\n",
      "\n",
      "you've already seen city by the sea under a variety of titles , but it's \u001b[92mworth\u001b[0m yet another visit .\n",
      "\n",
      "you've already seen city by the sea under a variety of titles , but it's \u001b[91mchastisement\u001b[0m yet another visit .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 14 / 0 / 6 / 20:  20% 20/100 [00:27<01:49,  1.37s/it]--------------------------------------------- Result 21 ---------------------------------------------\n",
      "\u001b[92mPositive (97%)\u001b[0m --> \u001b[91mNegative (59%)\u001b[0m\n",
      "\n",
      "this kind of hands-on \u001b[92mstorytelling\u001b[0m is ultimately what \u001b[92mmakes\u001b[0m shanghai ghetto move beyond a \u001b[92mgood\u001b[0m , dry , reliable textbook and what allows it to rank with its \u001b[92mworthy\u001b[0m predecessors .\n",
      "\n",
      "this kind of hands-on \u001b[91mmyth\u001b[0m is ultimately what \u001b[91mdo\u001b[0m shanghai ghetto move beyond a \u001b[91mopportune\u001b[0m , dry , reliable textbook and what allows it to rank with its \u001b[91mreputable\u001b[0m predecessors .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 15 / 0 / 6 / 21:  22% 22/100 [00:28<01:42,  1.31s/it]--------------------------------------------- Result 22 ---------------------------------------------\n",
      "\u001b[92mPositive (88%)\u001b[0m --> \u001b[91mNegative (56%)\u001b[0m\n",
      "\n",
      "making such a tragedy the backdrop to a love story risks trivializing it , though chouraqui no \u001b[92mdoubt\u001b[0m intended the \u001b[92mfilm\u001b[0m to affirm love's power to \u001b[92mhelp\u001b[0m people endure almost unimaginable horror .\n",
      "\n",
      "making such a tragedy the backdrop to a love story risks trivializing it , though chouraqui no \u001b[91msuspecting\u001b[0m intended the \u001b[91mmovies\u001b[0m to affirm love's power to \u001b[91mpomoc\u001b[0m people endure almost unimaginable horror .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 16 / 0 / 6 / 22:  22% 22/100 [00:28<01:42,  1.31s/it]--------------------------------------------- Result 23 ---------------------------------------------\n",
      "\u001b[91mNegative (54%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "grown-up quibbles are beside the point here . the little girls understand , and mccracken knows that's all that matters .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 16 / 0 / 7 / 23:  24% 24/100 [00:29<01:33,  1.22s/it]--------------------------------------------- Result 24 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (80%)\u001b[0m\n",
      "\n",
      "a \u001b[92mpowerful\u001b[0m , \u001b[92mchilling\u001b[0m , and affecting \u001b[92mstudy\u001b[0m of one man's dying fall .\n",
      "\n",
      "a \u001b[91mconclusive\u001b[0m , \u001b[91mmacabre\u001b[0m , and affecting \u001b[91mscrutinized\u001b[0m of one man's dying fall .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 17 / 0 / 7 / 24:  24% 24/100 [00:29<01:33,  1.22s/it]--------------------------------------------- Result 25 ---------------------------------------------\n",
      "\u001b[92mPositive (52%)\u001b[0m --> \u001b[91mNegative (65%)\u001b[0m\n",
      "\n",
      "this is a \u001b[92mfascinating\u001b[0m film because there is no clear-cut hero and no all-out villain .\n",
      "\n",
      "this is a \u001b[91minteresting\u001b[0m film because there is no clear-cut hero and no all-out villain .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 18 / 0 / 7 / 25:  26% 26/100 [00:29<01:24,  1.15s/it]--------------------------------------------- Result 26 ---------------------------------------------\n",
      "\u001b[92mPositive (86%)\u001b[0m --> \u001b[91mNegative (89%)\u001b[0m\n",
      "\n",
      "a dreadful day in irish history is given \u001b[92mpassionate\u001b[0m , if somewhat flawed , treatment .\n",
      "\n",
      "a dreadful day in irish history is given \u001b[91mvoracious\u001b[0m , if somewhat flawed , treatment .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 19 / 0 / 7 / 26:  26% 26/100 [00:29<01:24,  1.15s/it]--------------------------------------------- Result 27 ---------------------------------------------\n",
      "\u001b[92mPositive (94%)\u001b[0m --> \u001b[91mNegative (89%)\u001b[0m\n",
      "\n",
      ". . . a \u001b[92mgood\u001b[0m film that must have baffled the folks in the marketing department .\n",
      "\n",
      ". . . a \u001b[91madvisable\u001b[0m film that must have baffled the folks in the marketing department .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 20 / 0 / 7 / 27:  28% 28/100 [00:30<01:18,  1.10s/it]--------------------------------------------- Result 28 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (64%)\u001b[0m\n",
      "\n",
      ". . . is \u001b[92mfunny\u001b[0m in the \u001b[92mway\u001b[0m that makes you ache with sadness ( the way chekhov is funny ) , \u001b[92mprofound\u001b[0m without ever being self-important , \u001b[92mwarm\u001b[0m without ever \u001b[92msuccumbing\u001b[0m to sentimentality .\n",
      "\n",
      ". . . is \u001b[91moutlandish\u001b[0m in the \u001b[91mitineraries\u001b[0m that makes you ache with sadness ( the way chekhov is funny ) , \u001b[91mshum\u001b[0m without ever being self-important , \u001b[91mwarmest\u001b[0m without ever \u001b[91mfending\u001b[0m to sentimentality .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 21 / 0 / 7 / 28:  28% 28/100 [00:30<01:18,  1.10s/it]--------------------------------------------- Result 29 ---------------------------------------------\n",
      "\u001b[91mNegative (94%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "devotees of star trek ii : the wrath of khan will feel a nagging sense of deja vu , and the grandeur of the best next generation episodes is lacking .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 21 / 0 / 8 / 29:  30% 30/100 [00:32<01:15,  1.07s/it]--------------------------------------------- Result 30 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91m[FAILED]\u001b[0m\n",
      "\n",
      "a soul-stirring documentary about the israeli/palestinian conflict as revealed through the eyes of some children who remain curious about each other against all odds .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 21 / 1 / 8 / 30:  30% 30/100 [00:32<01:15,  1.07s/it]--------------------------------------------- Result 31 ---------------------------------------------\n",
      "\u001b[92mPositive (64%)\u001b[0m --> \u001b[91mNegative (87%)\u001b[0m\n",
      "\n",
      "what's so \u001b[92mstriking\u001b[0m about jolie's performance is that she never lets her character become a caricature -- not even with that radioactive hair .\n",
      "\n",
      "what's so \u001b[91mstaggering\u001b[0m about jolie's performance is that she never lets her character become a caricature -- not even with that radioactive hair .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 22 / 1 / 8 / 31:  32% 32/100 [00:32<01:08,  1.01s/it]--------------------------------------------- Result 32 ---------------------------------------------\n",
      "\u001b[91mNegative (67%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "the main story . . . is compelling enough , but it's difficult to shrug off the annoyance of that chatty fish .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 22 / 1 / 9 / 32:  32% 32/100 [00:32<01:08,  1.01s/it]--------------------------------------------- Result 33 ---------------------------------------------\n",
      "\u001b[92mPositive (96%)\u001b[0m --> \u001b[91mNegative (90%)\u001b[0m\n",
      "\n",
      "the performances are \u001b[92mimmaculate\u001b[0m , with roussillon providing comic relief .\n",
      "\n",
      "the performances are \u001b[91mfaultless\u001b[0m , with roussillon providing comic relief .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 23 / 1 / 9 / 33:  34% 34/100 [00:33<01:04,  1.03it/s]--------------------------------------------- Result 34 ---------------------------------------------\n",
      "\u001b[92mPositive (97%)\u001b[0m --> \u001b[91mNegative (55%)\u001b[0m\n",
      "\n",
      "kinnear . . . \u001b[92mgives\u001b[0m his best screen performance with an oddly \u001b[92mwinning\u001b[0m \u001b[92mportrayal\u001b[0m of one of life's ultimate losers .\n",
      "\n",
      "kinnear . . . \u001b[91mstipulates\u001b[0m his best screen performance with an oddly \u001b[91mwons\u001b[0m \u001b[91msketch\u001b[0m of one of life's ultimate losers .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 24 / 1 / 9 / 34:  34% 34/100 [00:33<01:04,  1.03it/s]--------------------------------------------- Result 35 ---------------------------------------------\n",
      "\u001b[91mNegative (59%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "hugh grant , who has a good line in charm , has never been more charming than in about a boy .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 24 / 1 / 10 / 35:  36% 36/100 [00:33<00:59,  1.08it/s]--------------------------------------------- Result 36 ---------------------------------------------\n",
      "\u001b[92mPositive (89%)\u001b[0m --> \u001b[91mNegative (60%)\u001b[0m\n",
      "\n",
      "there's a lot of tooth in roger dodger . but what's \u001b[92mnice\u001b[0m is that there's a casual intelligence that permeates the script .\n",
      "\n",
      "there's a lot of tooth in roger dodger . but what's \u001b[91mgentil\u001b[0m is that there's a casual intelligence that permeates the script .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 25 / 1 / 10 / 36:  36% 36/100 [00:33<00:59,  1.08it/s]--------------------------------------------- Result 37 ---------------------------------------------\n",
      "\u001b[91mNegative (78%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "reminiscent of alfred hitchcock's thrillers , most of the scary parts in 'signs' occur while waiting for things to happen .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 25 / 1 / 11 / 37:  38% 38/100 [00:33<00:55,  1.13it/s]--------------------------------------------- Result 38 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (58%)\u001b[0m\n",
      "\n",
      "one of the \u001b[92mbest\u001b[0m looking and \u001b[92mstylish\u001b[0m animated movies in quite a while . . .\n",
      "\n",
      "one of the \u001b[91mstrictest\u001b[0m looking and \u001b[91mtrendy\u001b[0m animated movies in quite a while . . .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 26 / 1 / 11 / 38:  38% 38/100 [00:33<00:55,  1.13it/s]--------------------------------------------- Result 39 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (70%)\u001b[0m\n",
      "\n",
      "its use of the thriller form to examine the labyrinthine ways in which people's lives cross and change , buffeted by events seemingly out of their control , is \u001b[92mintriguing\u001b[0m , \u001b[92mprovocative\u001b[0m stuff .\n",
      "\n",
      "its use of the thriller form to examine the labyrinthine ways in which people's lives cross and change , buffeted by events seemingly out of their control , is \u001b[91mdisconcerting\u001b[0m , \u001b[91mincite\u001b[0m stuff .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 27 / 1 / 11 / 39:  40% 40/100 [00:34<00:51,  1.16it/s]--------------------------------------------- Result 40 ---------------------------------------------\n",
      "\u001b[92mPositive (91%)\u001b[0m --> \u001b[91mNegative (54%)\u001b[0m\n",
      "\n",
      "denver \u001b[92mshould\u001b[0m not get the first and last look at one of the most triumphant performances of vanessa redgrave's career . it deserves to be seen everywhere .\n",
      "\n",
      "denver \u001b[91mwoud\u001b[0m not get the first and last look at one of the most triumphant performances of vanessa redgrave's career . it deserves to be seen everywhere .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 28 / 1 / 11 / 40:  40% 40/100 [00:34<00:51,  1.16it/s]--------------------------------------------- Result 41 ---------------------------------------------\n",
      "\u001b[92mPositive (60%)\u001b[0m --> \u001b[91mNegative (64%)\u001b[0m\n",
      "\n",
      "you needn't be steeped in '50s sociology , pop culture or movie lore to appreciate the emotional depth of haynes' work . \u001b[92mthough\u001b[0m haynes' style apes films from the period . . . its message is not rooted in that decade .\n",
      "\n",
      "you needn't be steeped in '50s sociology , pop culture or movie lore to appreciate the emotional depth of haynes' work . \u001b[91malbeit\u001b[0m haynes' style apes films from the period . . . its message is not rooted in that decade .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 29 / 1 / 11 / 41:  42% 42/100 [00:34<00:48,  1.21it/s]--------------------------------------------- Result 42 ---------------------------------------------\n",
      "\u001b[92mPositive (93%)\u001b[0m --> \u001b[91mNegative (59%)\u001b[0m\n",
      "\n",
      "waiting for godard can be \u001b[92mfruitful\u001b[0m : 'in praise of love' is the director's epitaph for himself .\n",
      "\n",
      "waiting for godard can be \u001b[91mpropitious\u001b[0m : 'in praise of love' is the director's epitaph for himself .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 30 / 1 / 11 / 42:  42% 42/100 [00:34<00:48,  1.20it/s]--------------------------------------------- Result 43 ---------------------------------------------\n",
      "\u001b[92mPositive (94%)\u001b[0m --> \u001b[91mNegative (94%)\u001b[0m\n",
      "\n",
      "a gangster movie with the capacity to \u001b[92msurprise\u001b[0m .\n",
      "\n",
      "a gangster movie with the capacity to \u001b[91mflabbergasted\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 31 / 1 / 11 / 43:  44% 44/100 [00:35<00:44,  1.25it/s]--------------------------------------------- Result 44 ---------------------------------------------\n",
      "\u001b[92mPositive (78%)\u001b[0m --> \u001b[91mNegative (82%)\u001b[0m\n",
      "\n",
      "the film has a laundry list of minor shortcomings , but the numerous scenes of gory mayhem are \u001b[92mworth\u001b[0m the price of admission . . . if \" gory mayhem \" is your idea of a good time .\n",
      "\n",
      "the film has a laundry list of minor shortcomings , but the numerous scenes of gory mayhem are \u001b[91mpriceless\u001b[0m the price of admission . . . if \" gory mayhem \" is your idea of a good time .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 32 / 1 / 11 / 44:  44% 44/100 [00:35<00:44,  1.25it/s]--------------------------------------------- Result 45 ---------------------------------------------\n",
      "\u001b[91mNegative (52%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "if not a home run , then at least a solid base hit .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 32 / 1 / 12 / 45:  46% 46/100 [00:35<00:41,  1.30it/s]--------------------------------------------- Result 46 ---------------------------------------------\n",
      "\u001b[92mPositive (88%)\u001b[0m --> \u001b[91mNegative (79%)\u001b[0m\n",
      "\n",
      "goldmember is \u001b[92mfunny\u001b[0m enough to justify the embarrassment of bringing a barf bag to the moviehouse .\n",
      "\n",
      "goldmember is \u001b[91mcomical\u001b[0m enough to justify the embarrassment of bringing a barf bag to the moviehouse .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 33 / 1 / 12 / 46:  46% 46/100 [00:35<00:41,  1.30it/s]--------------------------------------------- Result 47 ---------------------------------------------\n",
      "\u001b[92mPositive (72%)\u001b[0m --> \u001b[91mNegative (93%)\u001b[0m\n",
      "\n",
      ". . . a fairly disposable yet still \u001b[92mentertaining\u001b[0m b picture .\n",
      "\n",
      ". . . a fairly disposable yet still \u001b[91mdroll\u001b[0m b picture .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 34 / 1 / 12 / 47:  48% 48/100 [00:36<00:39,  1.32it/s]--------------------------------------------- Result 48 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (52%)\u001b[0m\n",
      "\n",
      "it may not be particularly \u001b[92minnovative\u001b[0m , but the film's crisp , unaffected style and air of \u001b[92mgentle\u001b[0m \u001b[92mlonging\u001b[0m make it unexpectedly \u001b[92mrewarding\u001b[0m .\n",
      "\n",
      "it may not be particularly \u001b[91munpublished\u001b[0m , but the film's crisp , unaffected style and air of \u001b[91msoft\u001b[0m \u001b[91mvacuuming\u001b[0m make it unexpectedly \u001b[91mbounties\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 35 / 1 / 12 / 48:  48% 48/100 [00:36<00:39,  1.32it/s]--------------------------------------------- Result 49 ---------------------------------------------\n",
      "\u001b[92mPositive (94%)\u001b[0m --> \u001b[91mNegative (80%)\u001b[0m\n",
      "\n",
      "the film \u001b[92mtruly\u001b[0m does rescue [the funk brothers] from motown's shadows . it's about time .\n",
      "\n",
      "the film \u001b[91mawfully\u001b[0m does rescue [the funk brothers] from motown's shadows . it's about time .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 36 / 1 / 12 / 49:  50% 50/100 [00:37<00:37,  1.34it/s]--------------------------------------------- Result 50 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (52%)\u001b[0m\n",
      "\n",
      "drawing on an \u001b[92mirresistible\u001b[0m , languid romanticism , byler \u001b[92mreveals\u001b[0m the \u001b[92mways\u001b[0m in which a sultry evening or a beer-fueled afternoon in the sun can inspire even the most retiring heart to venture forth .\n",
      "\n",
      "drawing on an \u001b[91mstupendous\u001b[0m , languid romanticism , byler \u001b[91mbetrays\u001b[0m the \u001b[91mmethodology\u001b[0m in which a sultry evening or a beer-fueled afternoon in the sun can inspire even the most retiring heart to venture forth .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 37 / 1 / 12 / 50:  50% 50/100 [00:37<00:37,  1.34it/s]--------------------------------------------- Result 51 ---------------------------------------------\n",
      "\u001b[91mNegative (92%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "works because we're never sure if ohlinger's on the level or merely a dying , delusional man trying to get into the history books before he croaks .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 37 / 1 / 13 / 51:  52% 52/100 [00:37<00:34,  1.38it/s]--------------------------------------------- Result 52 ---------------------------------------------\n",
      "\u001b[92mPositive (64%)\u001b[0m --> \u001b[91mNegative (68%)\u001b[0m\n",
      "\n",
      "[scherfig] \u001b[92mhas\u001b[0m made a movie that will leave you wondering about the characters' lives after the \u001b[92mclever\u001b[0m credits roll .\n",
      "\n",
      "[scherfig] \u001b[91mis\u001b[0m made a movie that will leave you wondering about the characters' lives after the \u001b[91mcleverer\u001b[0m credits roll .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 38 / 1 / 13 / 52:  52% 52/100 [00:37<00:34,  1.38it/s]--------------------------------------------- Result 53 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (51%)\u001b[0m\n",
      "\n",
      "a \u001b[92mheady\u001b[0m , \u001b[92mbiting\u001b[0m , be-bop ride through nighttime manhattan , a loquacious videologue of the \u001b[92mmodern\u001b[0m male and the lengths to which he'll go to weave a protective cocoon around his own ego .\n",
      "\n",
      "a \u001b[91mhectic\u001b[0m , \u001b[91mgnawing\u001b[0m , be-bop ride through nighttime manhattan , a loquacious videologue of the \u001b[91mupgraded\u001b[0m male and the lengths to which he'll go to weave a protective cocoon around his own ego .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 39 / 1 / 13 / 53:  54% 54/100 [00:38<00:32,  1.41it/s]--------------------------------------------- Result 54 ---------------------------------------------\n",
      "\u001b[92mPositive (97%)\u001b[0m --> \u001b[91mNegative (54%)\u001b[0m\n",
      "\n",
      "skin of man gets a few cheap shocks from its kids-in-peril theatrics , but it also \u001b[92mtaps\u001b[0m into the \u001b[92mprimal\u001b[0m fears of young people trying to cope with the mysterious and brutal nature of adults .\n",
      "\n",
      "skin of man gets a few cheap shocks from its kids-in-peril theatrics , but it also \u001b[91mfaucets\u001b[0m into the \u001b[91mprimordial\u001b[0m fears of young people trying to cope with the mysterious and brutal nature of adults .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 40 / 1 / 13 / 54:  54% 54/100 [00:38<00:32,  1.41it/s]--------------------------------------------- Result 55 ---------------------------------------------\n",
      "\u001b[92mPositive (86%)\u001b[0m --> \u001b[91mNegative (55%)\u001b[0m\n",
      "\n",
      "the piano teacher is not an easy film . it forces you to watch people doing unpleasant things to each other and themselves , and it maintains a \u001b[92mcool\u001b[0m distance from its material that is deliberately unsettling .\n",
      "\n",
      "the piano teacher is not an easy film . it forces you to watch people doing unpleasant things to each other and themselves , and it maintains a \u001b[91mcopacetic\u001b[0m distance from its material that is deliberately unsettling .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 41 / 1 / 13 / 55:  56% 56/100 [00:38<00:30,  1.45it/s]--------------------------------------------- Result 56 ---------------------------------------------\n",
      "\u001b[92mPositive (95%)\u001b[0m --> \u001b[91mNegative (91%)\u001b[0m\n",
      "\n",
      "as \u001b[92mrefreshing\u001b[0m as a drink from a woodland stream .\n",
      "\n",
      "as \u001b[91mretrofit\u001b[0m as a drink from a woodland stream .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 42 / 1 / 13 / 56:  56% 56/100 [00:38<00:30,  1.45it/s]--------------------------------------------- Result 57 ---------------------------------------------\n",
      "\u001b[92mPositive (55%)\u001b[0m --> \u001b[91mNegative (95%)\u001b[0m\n",
      "\n",
      "williams absolutely nails sy's queasy infatuation and overall \u001b[92mstrangeness\u001b[0m .\n",
      "\n",
      "williams absolutely nails sy's queasy infatuation and overall \u001b[91mennui\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 43 / 1 / 13 / 57:  58% 58/100 [00:39<00:28,  1.48it/s]--------------------------------------------- Result 58 ---------------------------------------------\n",
      "\u001b[92mPositive (67%)\u001b[0m --> \u001b[91mNegative (52%)\u001b[0m\n",
      "\n",
      "can i admit xxx is as deep as a petri dish and as well-characterized as a telephone book but still say it was a guilty \u001b[92mpleasure\u001b[0m ?\n",
      "\n",
      "can i admit xxx is as deep as a petri dish and as well-characterized as a telephone book but still say it was a guilty \u001b[91mamusement\u001b[0m ?\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 44 / 1 / 13 / 58:  58% 58/100 [00:39<00:28,  1.48it/s]--------------------------------------------- Result 59 ---------------------------------------------\n",
      "\u001b[92mPositive (85%)\u001b[0m --> \u001b[91mNegative (55%)\u001b[0m\n",
      "\n",
      "while it's nothing we haven't seen before from murphy , i spy is still fun and \u001b[92menjoyable\u001b[0m and so aggressively silly that it's more than a worthwhile effort .\n",
      "\n",
      "while it's nothing we haven't seen before from murphy , i spy is still fun and \u001b[91mcosy\u001b[0m and so aggressively silly that it's more than a worthwhile effort .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 45 / 1 / 13 / 59:  60% 60/100 [00:39<00:26,  1.52it/s]--------------------------------------------- Result 60 ---------------------------------------------\n",
      "\u001b[91mNegative (73%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "by the time it ends in a rush of sequins , flashbulbs , blaring brass and back-stabbing babes , it has said plenty about how show business has infiltrated every corner of society -- and not always for the better .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 45 / 1 / 14 / 60:  60% 60/100 [00:39<00:26,  1.52it/s]--------------------------------------------- Result 61 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (75%)\u001b[0m\n",
      "\n",
      "an \u001b[92mintimate\u001b[0m contemplation of two marvelously messy lives .\n",
      "\n",
      "an \u001b[91msqueamish\u001b[0m contemplation of two marvelously messy lives .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 46 / 1 / 14 / 61:  62% 62/100 [00:39<00:24,  1.56it/s]--------------------------------------------- Result 62 ---------------------------------------------\n",
      "\u001b[92mPositive (54%)\u001b[0m --> \u001b[91mNegative (58%)\u001b[0m\n",
      "\n",
      "rarely has skin looked as \u001b[92mbeautiful\u001b[0m , desirable , even delectable , as it does in trouble every day .\n",
      "\n",
      "rarely has skin looked as \u001b[91mnice\u001b[0m , desirable , even delectable , as it does in trouble every day .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 47 / 1 / 14 / 62:  62% 62/100 [00:39<00:24,  1.56it/s]--------------------------------------------- Result 63 ---------------------------------------------\n",
      "\u001b[92mPositive (97%)\u001b[0m --> \u001b[91mNegative (60%)\u001b[0m\n",
      "\n",
      "this is one of those rare docs that paints a \u001b[92mgrand\u001b[0m picture of an \u001b[92mera\u001b[0m and \u001b[92mmakes\u001b[0m the \u001b[92mjourney\u001b[0m feel like a \u001b[92mparty\u001b[0m .\n",
      "\n",
      "this is one of those rare docs that paints a \u001b[91mhefty\u001b[0m picture of an \u001b[91meras\u001b[0m and \u001b[91mai\u001b[0m the \u001b[91mtrip\u001b[0m feel like a \u001b[91mportion\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 48 / 1 / 14 / 63:  64% 64/100 [00:40<00:22,  1.57it/s]--------------------------------------------- Result 64 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (94%)\u001b[0m\n",
      "\n",
      "\u001b[92mpoignant\u001b[0m if familiar story of a young person suspended between two cultures .\n",
      "\n",
      "\u001b[91mdisquieting\u001b[0m if familiar story of a young person suspended between two cultures .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 49 / 1 / 14 / 64:  64% 64/100 [00:40<00:22,  1.57it/s]--------------------------------------------- Result 65 ---------------------------------------------\n",
      "\u001b[92mPositive (94%)\u001b[0m --> \u001b[91mNegative (93%)\u001b[0m\n",
      "\n",
      "a \u001b[92mmetaphor\u001b[0m for a modern-day urban china searching for its identity .\n",
      "\n",
      "a \u001b[91mcliché\u001b[0m for a modern-day urban china searching for its identity .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 50 / 1 / 14 / 65:  66% 66/100 [00:41<00:21,  1.60it/s]--------------------------------------------- Result 66 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (69%)\u001b[0m\n",
      "\n",
      "for all its brooding quality , ash wednesday is \u001b[92msuspenseful\u001b[0m and ultimately unpredictable , with a \u001b[92msterling\u001b[0m ensemble cast .\n",
      "\n",
      "for all its brooding quality , ash wednesday is \u001b[91mupsetting\u001b[0m and ultimately unpredictable , with a \u001b[91mstirling\u001b[0m ensemble cast .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 51 / 1 / 14 / 66:  66% 66/100 [00:41<00:21,  1.60it/s]--------------------------------------------- Result 67 ---------------------------------------------\n",
      "\u001b[92mPositive (90%)\u001b[0m --> \u001b[91mNegative (52%)\u001b[0m\n",
      "\n",
      "an odd \u001b[92mdrama\u001b[0m set in the \u001b[92mworld\u001b[0m of lingerie models and bar dancers in the midwest that held my interest precisely because it didn't try to .\n",
      "\n",
      "an odd \u001b[91mcinematographic\u001b[0m set in the \u001b[91mglobo\u001b[0m of lingerie models and bar dancers in the midwest that held my interest precisely because it didn't try to .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 52 / 1 / 14 / 67:  68% 68/100 [00:41<00:19,  1.63it/s]--------------------------------------------- Result 68 ---------------------------------------------\n",
      "\u001b[92mPositive (85%)\u001b[0m --> \u001b[91mNegative (67%)\u001b[0m\n",
      "\n",
      "the film feels uncomfortably \u001b[92mreal\u001b[0m , its language and locations bearing the unmistakable stamp of authority .\n",
      "\n",
      "the film feels uncomfortably \u001b[91mactual\u001b[0m , its language and locations bearing the unmistakable stamp of authority .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 53 / 1 / 14 / 68:  68% 68/100 [00:41<00:19,  1.63it/s]--------------------------------------------- Result 69 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (90%)\u001b[0m\n",
      "\n",
      "despite its faults , gangs \u001b[92mexcels\u001b[0m in spectacle and pacing .\n",
      "\n",
      "despite its faults , gangs \u001b[91moverwhelms\u001b[0m in spectacle and pacing .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 54 / 1 / 14 / 69:  70% 70/100 [00:42<00:18,  1.66it/s]--------------------------------------------- Result 70 ---------------------------------------------\n",
      "\u001b[92mPositive (80%)\u001b[0m --> \u001b[91mNegative (51%)\u001b[0m\n",
      "\n",
      "\u001b[92mentertaining\u001b[0m despite its one-joke premise with the thesis that women from venus and men from mars can indeed get together .\n",
      "\n",
      "\u001b[91mamusing\u001b[0m despite its one-joke premise with the thesis that women from venus and men from mars can indeed get together .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 55 / 1 / 14 / 70:  70% 70/100 [00:42<00:18,  1.66it/s]--------------------------------------------- Result 71 ---------------------------------------------\n",
      "\u001b[92mPositive (97%)\u001b[0m --> \u001b[91mNegative (91%)\u001b[0m\n",
      "\n",
      "a tightly directed , \u001b[92mhighly\u001b[0m professional film that's old-fashioned in all the best possible ways .\n",
      "\n",
      "a tightly directed , \u001b[91mexcessively\u001b[0m professional film that's old-fashioned in all the best possible ways .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 56 / 1 / 14 / 71:  72% 72/100 [00:43<00:16,  1.66it/s]--------------------------------------------- Result 72 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (60%)\u001b[0m\n",
      "\n",
      "it's \u001b[92mdark\u001b[0m but has \u001b[92mwonderfully\u001b[0m \u001b[92mfunny\u001b[0m \u001b[92mmoments\u001b[0m ; you \u001b[92mcare\u001b[0m about the characters ; and the \u001b[92maction\u001b[0m and special effects are first-rate .\n",
      "\n",
      "it's \u001b[91mghoulish\u001b[0m but has \u001b[91munspeakably\u001b[0m \u001b[91mjoke\u001b[0m \u001b[91mmins\u001b[0m ; you \u001b[91mzorg\u001b[0m about the characters ; and the \u001b[91moperating\u001b[0m and special effects are first-rate .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 57 / 1 / 14 / 72:  72% 72/100 [00:43<00:16,  1.66it/s]--------------------------------------------- Result 73 ---------------------------------------------\n",
      "\u001b[92mPositive (89%)\u001b[0m --> \u001b[91mNegative (52%)\u001b[0m\n",
      "\n",
      "in visual fertility \u001b[92mtreasure\u001b[0m planet rivals the \u001b[92mtop\u001b[0m japanese animations of recent vintage .\n",
      "\n",
      "in visual fertility \u001b[91mcoffer\u001b[0m planet rivals the \u001b[91msupremo\u001b[0m japanese animations of recent vintage .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 58 / 1 / 14 / 73:  74% 74/100 [00:44<00:15,  1.68it/s]--------------------------------------------- Result 74 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (96%)\u001b[0m\n",
      "\n",
      "enormously \u001b[92menjoyable\u001b[0m , high-adrenaline \u001b[92mdocumentary\u001b[0m .\n",
      "\n",
      "enormously \u001b[91mdroll\u001b[0m , high-adrenaline \u001b[91mpaperwork\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 59 / 1 / 14 / 74:  74% 74/100 [00:44<00:15,  1.68it/s]--------------------------------------------- Result 75 ---------------------------------------------\n",
      "\u001b[92mPositive (97%)\u001b[0m --> \u001b[91mNegative (50%)\u001b[0m\n",
      "\n",
      "buy is an accomplished actress , and this is a big , \u001b[92mjuicy\u001b[0m role .\n",
      "\n",
      "buy is an accomplished actress , and this is a big , \u001b[91mcrusty\u001b[0m role .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 60 / 1 / 14 / 75:  76% 76/100 [00:44<00:14,  1.69it/s]--------------------------------------------- Result 76 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (58%)\u001b[0m\n",
      "\n",
      "it \u001b[92mworks\u001b[0m its \u001b[92mmagic\u001b[0m with such \u001b[92mexuberance\u001b[0m and passion that the film's length becomes a part of its \u001b[92mfun\u001b[0m .\n",
      "\n",
      "it \u001b[91mfunctioned\u001b[0m its \u001b[91mpotions\u001b[0m with such \u001b[91melation\u001b[0m and passion that the film's length becomes a part of its \u001b[91mbanter\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 61 / 1 / 14 / 76:  76% 76/100 [00:44<00:14,  1.69it/s]--------------------------------------------- Result 77 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (58%)\u001b[0m\n",
      "\n",
      "\u001b[92mbeautifully\u001b[0m crafted and \u001b[92mbrutally\u001b[0m \u001b[92mhonest\u001b[0m , promises offers an \u001b[92munexpected\u001b[0m \u001b[92mwindow\u001b[0m into the complexities of the middle east \u001b[92mstruggle\u001b[0m and into the \u001b[92mhumanity\u001b[0m of its people .\n",
      "\n",
      "\u001b[91mimpossibly\u001b[0m crafted and \u001b[91mhastily\u001b[0m \u001b[91mveritable\u001b[0m , promises offers an \u001b[91munforeseen\u001b[0m \u001b[91mfibreglass\u001b[0m into the complexities of the middle east \u001b[91mtussle\u001b[0m and into the \u001b[91mhumans\u001b[0m of its people .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 62 / 1 / 14 / 77:  78% 78/100 [00:46<00:13,  1.68it/s]--------------------------------------------- Result 78 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (67%)\u001b[0m\n",
      "\n",
      "an old-fashioned but emotionally \u001b[92mstirring\u001b[0m adventure tale of the kind they rarely make anymore .\n",
      "\n",
      "an old-fashioned but emotionally \u001b[91mwavering\u001b[0m adventure tale of the kind they rarely make anymore .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 63 / 1 / 14 / 78:  78% 78/100 [00:46<00:13,  1.68it/s]--------------------------------------------- Result 79 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (93%)\u001b[0m\n",
      "\n",
      "charlotte sometimes is a \u001b[92mgem\u001b[0m . it's always \u001b[92menthralling\u001b[0m .\n",
      "\n",
      "charlotte sometimes is a \u001b[91mbling\u001b[0m . it's always \u001b[91mhallucinatory\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 64 / 1 / 14 / 79:  80% 80/100 [00:47<00:11,  1.70it/s]--------------------------------------------- Result 80 ---------------------------------------------\n",
      "\u001b[92mPositive (92%)\u001b[0m --> \u001b[91mNegative (50%)\u001b[0m\n",
      "\n",
      "in my opinion , analyze that is not as funny or entertaining as \u001b[92manalyze\u001b[0m this , but it is a \u001b[92mrespectable\u001b[0m sequel .\n",
      "\n",
      "in my opinion , analyze that is not as funny or entertaining as \u001b[91mdiscusses\u001b[0m this , but it is a \u001b[91mreputable\u001b[0m sequel .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 65 / 1 / 14 / 80:  80% 80/100 [00:47<00:11,  1.70it/s]--------------------------------------------- Result 81 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (94%)\u001b[0m\n",
      "\n",
      "a \u001b[92mremarkable\u001b[0m film by bernard rose .\n",
      "\n",
      "a \u001b[91mwhopping\u001b[0m film by bernard rose .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 66 / 1 / 14 / 81:  82% 82/100 [00:47<00:10,  1.72it/s]--------------------------------------------- Result 82 ---------------------------------------------\n",
      "\u001b[92mPositive (80%)\u001b[0m --> \u001b[91mNegative (58%)\u001b[0m\n",
      "\n",
      "zhuangzhuang creates delicate balance of style , text , and subtext that's so simple and \u001b[92mprecise\u001b[0m that anything discordant would topple the balance , but against all odds , nothing does .\n",
      "\n",
      "zhuangzhuang creates delicate balance of style , text , and subtext that's so simple and \u001b[91mspecify\u001b[0m that anything discordant would topple the balance , but against all odds , nothing does .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 67 / 1 / 14 / 82:  82% 82/100 [00:47<00:10,  1.72it/s]--------------------------------------------- Result 83 ---------------------------------------------\n",
      "\u001b[92mPositive (84%)\u001b[0m --> \u001b[91mNegative (71%)\u001b[0m\n",
      "\n",
      "a much more \u001b[92msuccessful\u001b[0m translation than its most famous previous film adaptation , writer-director anthony friedman's similarly updated 1970 british production .\n",
      "\n",
      "a much more \u001b[91mpropitious\u001b[0m translation than its most famous previous film adaptation , writer-director anthony friedman's similarly updated 1970 british production .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 68 / 1 / 14 / 83:  84% 84/100 [00:47<00:09,  1.75it/s]--------------------------------------------- Result 84 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (56%)\u001b[0m\n",
      "\n",
      "an \u001b[92moriginal\u001b[0m and highly cerebral examination of the psychopathic mind\n",
      "\n",
      "an \u001b[91mrudimentary\u001b[0m and highly cerebral examination of the psychopathic mind\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 69 / 1 / 14 / 84:  84% 84/100 [00:47<00:09,  1.75it/s]--------------------------------------------- Result 85 ---------------------------------------------\n",
      "\u001b[92mPositive (93%)\u001b[0m --> \u001b[91mNegative (83%)\u001b[0m\n",
      "\n",
      "michel piccoli's \u001b[92mmoving\u001b[0m performance is this films reason for being .\n",
      "\n",
      "michel piccoli's \u001b[91mresettled\u001b[0m performance is this films reason for being .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 70 / 1 / 14 / 85:  86% 86/100 [00:48<00:07,  1.76it/s]--------------------------------------------- Result 86 ---------------------------------------------\n",
      "\u001b[92mPositive (99%)\u001b[0m --> \u001b[91mNegative (95%)\u001b[0m\n",
      "\n",
      "a \u001b[92mcaptivating\u001b[0m and \u001b[92mintimate\u001b[0m \u001b[92mstudy\u001b[0m about \u001b[92mdying\u001b[0m and loving . . .\n",
      "\n",
      "a \u001b[91mhallucinatory\u001b[0m and \u001b[91mcosy\u001b[0m \u001b[91mscrutinized\u001b[0m about \u001b[91mdecedent\u001b[0m and loving . . .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 71 / 1 / 14 / 86:  86% 86/100 [00:48<00:07,  1.76it/s]--------------------------------------------- Result 87 ---------------------------------------------\n",
      "\u001b[92mPositive (96%)\u001b[0m --> \u001b[91mNegative (75%)\u001b[0m\n",
      "\n",
      "this is an \u001b[92melegantly\u001b[0m \u001b[92mbalanced\u001b[0m movie -- every member of the ensemble has something fascinating to do -- that doesn't reveal even a hint of artifice .\n",
      "\n",
      "this is an \u001b[91mprettily\u001b[0m \u001b[91mbalancing\u001b[0m movie -- every member of the ensemble has something fascinating to do -- that doesn't reveal even a hint of artifice .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 72 / 1 / 14 / 87:  88% 88/100 [00:49<00:06,  1.78it/s]--------------------------------------------- Result 88 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (61%)\u001b[0m\n",
      "\n",
      "[grant] goes beyond his usual fluttering and stammering and \u001b[92mcaptures\u001b[0m the \u001b[92msoul\u001b[0m of a man in pain who gradually comes to recognize it and deal with it .\n",
      "\n",
      "[grant] goes beyond his usual fluttering and stammering and \u001b[91mincarcerate\u001b[0m the \u001b[91mwits\u001b[0m of a man in pain who gradually comes to recognize it and deal with it .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 73 / 1 / 14 / 88:  88% 88/100 [00:49<00:06,  1.78it/s]--------------------------------------------- Result 89 ---------------------------------------------\n",
      "\u001b[92mPositive (96%)\u001b[0m --> \u001b[91mNegative (58%)\u001b[0m\n",
      "\n",
      "a high-spirited buddy \u001b[92mmovie\u001b[0m about the \u001b[92mreunion\u001b[0m of \u001b[92mberlin\u001b[0m \u001b[92manarchists\u001b[0m who \u001b[92mface\u001b[0m \u001b[92marrest\u001b[0m 15 \u001b[92myears\u001b[0m after their \u001b[92mcrime\u001b[0m .\n",
      "\n",
      "a high-spirited buddy \u001b[91mvideo\u001b[0m about the \u001b[91mpooled\u001b[0m of \u001b[91mgermania\u001b[0m \u001b[91manarchist\u001b[0m who \u001b[91mfacial\u001b[0m \u001b[91mintercepted\u001b[0m 15 \u001b[91molds\u001b[0m after their \u001b[91mpenal\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 74 / 1 / 14 / 89:  90% 90/100 [00:50<00:05,  1.77it/s]--------------------------------------------- Result 90 ---------------------------------------------\n",
      "\u001b[91mNegative (84%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "about the best thing you could say about narc is that it's a rock-solid little genre picture . whether you like it or not is basically a matter of taste .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 74 / 1 / 15 / 90:  90% 90/100 [00:50<00:05,  1.77it/s]--------------------------------------------- Result 91 ---------------------------------------------\n",
      "\u001b[92mPositive (97%)\u001b[0m --> \u001b[91mNegative (80%)\u001b[0m\n",
      "\n",
      "an involving , \u001b[92minspirational\u001b[0m \u001b[92mdrama\u001b[0m that sometimes falls prey to its sob-story trappings .\n",
      "\n",
      "an involving , \u001b[91mincentive\u001b[0m \u001b[91mcataclysmic\u001b[0m that sometimes falls prey to its sob-story trappings .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 75 / 1 / 15 / 91:  92% 92/100 [00:51<00:04,  1.79it/s]--------------------------------------------- Result 92 ---------------------------------------------\n",
      "\u001b[92mPositive (96%)\u001b[0m --> \u001b[91mNegative (75%)\u001b[0m\n",
      "\n",
      "some of the most \u001b[92minventive\u001b[0m silliness you are likely to witness in a movie theatre for some time .\n",
      "\n",
      "some of the most \u001b[91mcontrivance\u001b[0m silliness you are likely to witness in a movie theatre for some time .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 76 / 1 / 15 / 92:  92% 92/100 [00:51<00:04,  1.79it/s]--------------------------------------------- Result 93 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (57%)\u001b[0m\n",
      "\n",
      "canadian \u001b[92mfilmmaker\u001b[0m gary burns' \u001b[92minventive\u001b[0m and mordantly \u001b[92mhumorous\u001b[0m \u001b[92mtake\u001b[0m on the soullessness of work in the \u001b[92mcity\u001b[0m .\n",
      "\n",
      "canadian \u001b[91mscriptwriter\u001b[0m gary burns' \u001b[91minventor\u001b[0m and mordantly \u001b[91mprank\u001b[0m \u001b[91mtakes\u001b[0m on the soullessness of work in the \u001b[91mshing\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 77 / 1 / 15 / 93:  94% 94/100 [00:52<00:03,  1.78it/s]--------------------------------------------- Result 94 ---------------------------------------------\n",
      "\u001b[92mPositive (98%)\u001b[0m --> \u001b[91mNegative (65%)\u001b[0m\n",
      "\n",
      "a rollicking \u001b[92mride\u001b[0m , with jaw-dropping action sequences , striking villains , a \u001b[92mgorgeous\u001b[0m color palette , astounding technology , \u001b[92mstirring\u001b[0m music and a boffo last hour that leads up to a strangely sinister happy ending .\n",
      "\n",
      "a rollicking \u001b[91mwrinkle\u001b[0m , with jaw-dropping action sequences , striking villains , a \u001b[91mleggy\u001b[0m color palette , astounding technology , \u001b[91magitation\u001b[0m music and a boffo last hour that leads up to a strangely sinister happy ending .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 78 / 1 / 15 / 94:  94% 94/100 [00:52<00:03,  1.78it/s]--------------------------------------------- Result 95 ---------------------------------------------\n",
      "\u001b[92mPositive (99%)\u001b[0m --> \u001b[91mNegative (51%)\u001b[0m\n",
      "\n",
      "everyone's insecure in lovely and \u001b[92mamazing\u001b[0m , a \u001b[92mpoignant\u001b[0m and wryly amusing film about mothers , daughters and their relationships .\n",
      "\n",
      "everyone's insecure in lovely and \u001b[91mwhopping\u001b[0m , a \u001b[91mdisquieting\u001b[0m and wryly amusing film about mothers , daughters and their relationships .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 79 / 1 / 15 / 95:  96% 96/100 [00:53<00:02,  1.80it/s]--------------------------------------------- Result 96 ---------------------------------------------\n",
      "\u001b[92mPositive (52%)\u001b[0m --> \u001b[91mNegative (75%)\u001b[0m\n",
      "\n",
      "the closest thing to the \u001b[92mexperience\u001b[0m of space travel\n",
      "\n",
      "the closest thing to the \u001b[91mpilot\u001b[0m of space travel\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 80 / 1 / 15 / 96:  96% 96/100 [00:53<00:02,  1.80it/s]--------------------------------------------- Result 97 ---------------------------------------------\n",
      "\u001b[92mPositive (96%)\u001b[0m --> \u001b[91mNegative (96%)\u001b[0m\n",
      "\n",
      "full of \u001b[92msurprises\u001b[0m .\n",
      "\n",
      "full of \u001b[91mstumped\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 81 / 1 / 15 / 97:  98% 98/100 [00:54<00:01,  1.81it/s]--------------------------------------------- Result 98 ---------------------------------------------\n",
      "\u001b[92mPositive (90%)\u001b[0m --> \u001b[91mNegative (52%)\u001b[0m\n",
      "\n",
      "connoisseurs of \u001b[92mchinese\u001b[0m film will be pleased to discover that tian's meticulous \u001b[92mtalent\u001b[0m \u001b[92mhas\u001b[0m not \u001b[92mwithered\u001b[0m during his enforced \u001b[92mhiatus\u001b[0m .\n",
      "\n",
      "connoisseurs of \u001b[91mcantonese\u001b[0m film will be pleased to discover that tian's meticulous \u001b[91mstaffing\u001b[0m \u001b[91mis\u001b[0m not \u001b[91mbloomed\u001b[0m during his enforced \u001b[91mharford\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 82 / 1 / 15 / 98:  98% 98/100 [00:54<00:01,  1.81it/s]--------------------------------------------- Result 99 ---------------------------------------------\n",
      "\u001b[92mPositive (95%)\u001b[0m --> \u001b[91mNegative (69%)\u001b[0m\n",
      "\n",
      "if you can push on through the slow spots , you'll be \u001b[92mrewarded\u001b[0m with some \u001b[92mfine\u001b[0m \u001b[92macting\u001b[0m .\n",
      "\n",
      "if you can push on through the slow spots , you'll be \u001b[91mrecompense\u001b[0m with some \u001b[91mwondrous\u001b[0m \u001b[91mbehaving\u001b[0m .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 83 / 1 / 15 / 99: 100% 100/100 [00:54<00:00,  1.84it/s]--------------------------------------------- Result 100 ---------------------------------------------\n",
      "\u001b[91mNegative (50%)\u001b[0m --> \u001b[37m[SKIPPED]\u001b[0m\n",
      "\n",
      "an unusually dry-eyed , even analytical approach to material that is generally played for maximum moisture .\n",
      "\n",
      "\n",
      "[Succeeded / Failed / Skipped / Total] 83 / 1 / 16 / 100: 100% 100/100 [00:54<00:00,  1.84it/s]\n",
      "\n",
      "+-------------------------------+--------+\n",
      "| Attack Results                |        |\n",
      "+-------------------------------+--------+\n",
      "| Number of successful attacks: | 83     |\n",
      "| Number of failed attacks:     | 1      |\n",
      "| Number of skipped attacks:    | 16     |\n",
      "| Original accuracy:            | 84.0%  |\n",
      "| Accuracy under attack:        | 1.0%   |\n",
      "| Attack success rate:          | 98.81% |\n",
      "| Average perturbed word %:     | 13.68% |\n",
      "| Average num. words per input: | 18.45  |\n",
      "| Avg num queries:              | 80.13  |\n",
      "+-------------------------------+--------+\n"
     ]
    }
   ],
   "source": [
    "!textattack attack --recipe textfooler --num-examples 100 --model ./outputs/2021-10-13-17-37-27-247436/best_model/ --dataset-from-huggingface rotten_tomatoes --dataset-split test"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "iyrJM3CaseoL"
   },
   "source": [
     "Looks like our model was 84% accurate (makes sense - it is the same evaluation set as `textattack eval`!), meaning that TextAttack attacked the model with 84 examples (since the attack won't run if an example is originally mispredicted). The attack success rate was 98.8%, meaning that TextFooler failed to find an adversarial example only 1.2% (1 out of 84) of the time.\n",
    "\n",
    "\n",
    "## Conclusion\n",
    "\n",
    "That's all, folks! We've learned how to train, evaluate, and attack a model with TextAttack, using only three commands! 😀\n",
    "\n",
    "\n",
    "\n",
    "## Bonus\n",
    "\n",
     "There are many powerful features in TextAttack that we can use through the command line. Here is a list of examples as a bonus for your learning."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "!textattack attack --recipe deepwordbug --model lstm-mr --num-examples 2 --log-summary-to-json attack_summary.json"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "!textattack attack --model cnn-yelp --num-examples 3 --search-method greedy-word-wir --transformation word-swap-wordnet --constraints cola^max_diff=0.1 bert-score^min_bert_score=0.7 --enable-advance-metrics"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "!textattack attack --model lstm-mr --recipe deepwordbug --num-examples 2 --attack-n --enable-advance-metrics"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "!textattack attack --model lstm-mr --recipe hotflip --num-examples 4 --num-examples-offset 3 --enable-advance-metrics"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "!textattack attack --model-from-huggingface distilbert-base-uncased-finetuned-sst-2-english --dataset-from-huggingface glue^sst2^train --recipe deepwordbug --num-examples 3 --enable-advance-metrics"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "! textattack attack --model cnn-imdb --attack-from-file tests/sample_inputs/attack_from_file.py^Attack --num-examples 2  --num-examples-offset 18 --attack-n"
   ]
  }
 ],
 "metadata": {
  "accelerator": "GPU",
  "colab": {
   "collapsed_sections": [],
   "name": "0_End_to_End.ipynb",
   "provenance": []
  },
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
