{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "\n",
    "os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = '/home/husein/t5/prepare/mesolitica-tpu.json'\n",
    "os.environ['CUDA_VISIBLE_DEVICES'] = ''"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "from bigbird import modeling\n",
    "from bigbird import utils\n",
    "import tensorflow as tf\n",
    "import numpy as np"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tokenization\n",
    "\n",
    "tokenizer = tokenization.FullTokenizer(\n",
    "    vocab_file='pegasus.wordpiece', do_lower_case=False\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "top_p = tf.placeholder(tf.float32, None, name = 'top_p')\n",
    "temperature = tf.placeholder(tf.float32, None, name = 'temperature')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "bert_config = {\n",
    "    'attention_probs_dropout_prob': 0.1,\n",
    "    'hidden_act': 'relu',\n",
    "    'hidden_dropout_prob': 0.1,\n",
    "    'hidden_size': 512,\n",
    "    'initializer_range': 0.02,\n",
    "    'intermediate_size': 3072,\n",
    "    'max_position_embeddings': 4096,\n",
    "    'max_encoder_length': 1536,\n",
    "    'max_decoder_length': 768,\n",
    "    'num_attention_heads': 8,\n",
    "    'num_hidden_layers': 6,\n",
    "    'type_vocab_size': 2,\n",
    "    'scope': 'pegasus',\n",
    "    'use_bias': False,\n",
    "    'rescale_embedding': True,\n",
    "    'vocab_model_file': None,\n",
    "    # sparse mask configs\n",
    "    'attention_type': 'block_sparse',\n",
    "    'norm_type': 'prenorm',\n",
    "    'block_size': 64,\n",
    "    'num_rand_blocks': 3,\n",
    "    'vocab_size': 32000,\n",
    "    'beam_size': 1,\n",
    "    'alpha': 0.0,\n",
    "    'couple_encoder_decoder': False,\n",
    "    'num_warmup_steps': 10000,\n",
    "    'learning_rate': 0.0001,\n",
    "    'label_smoothing': 0.1,\n",
    "    'optimizer': 'Adafactor',\n",
    "    'use_tpu': False,\n",
    "    'top_p': top_p,\n",
    "    'temperature': temperature\n",
    "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "model = modeling.TransformerModel(bert_config)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/husein/malaya/malaya/pretrained-model/bigbird/bigbird/modeling.py:226: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use tf.where in 2.0, which has the same broadcast rule as np.where\n",
      "WARNING:tensorflow:From /home/husein/.local/lib/python3.6/site-packages/tensorflow_core/python/autograph/converters/directives.py:119: The name tf.random_uniform is deprecated. Please use tf.random.uniform instead.\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/husein/.local/lib/python3.6/site-packages/tensorflow_core/python/autograph/converters/directives.py:119: The name tf.random_uniform is deprecated. Please use tf.random.uniform instead.\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/husein/.local/lib/python3.6/site-packages/tensorflow_core/python/autograph/converters/directives.py:119: The name tf.log is deprecated. Please use tf.math.log instead.\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/husein/.local/lib/python3.6/site-packages/tensorflow_core/python/autograph/converters/directives.py:119: The name tf.log is deprecated. Please use tf.math.log instead.\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/husein/.local/lib/python3.6/site-packages/tensorflow_core/python/util/deprecation.py:507: calling count_nonzero (from tensorflow.python.ops.math_ops) with axis is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "reduction_indices is deprecated, use axis instead\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/husein/.local/lib/python3.6/site-packages/tensorflow_core/python/util/deprecation.py:507: calling count_nonzero (from tensorflow.python.ops.math_ops) with axis is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "reduction_indices is deprecated, use axis instead\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "((<tf.Tensor 'pegasus/log_probs:0' shape=(?, 768) dtype=float32>,\n",
       "  <tf.Tensor 'pegasus/logits:0' shape=(?, 768, 32000) dtype=float32>,\n",
       "  <tf.Tensor 'pegasus/while/Exit_1:0' shape=(?, 768) dtype=int32>),\n",
       " <tf.Tensor 'pegasus/encoder/LayerNorm/batchnorm/add_1:0' shape=(?, 1536, 512) dtype=float32>)"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X = tf.placeholder(tf.int32, [None, None])\n",
    "r = model(X, training = False)\n",
    "r"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Tensor 'logits:0' shape=(?, 768) dtype=int32>"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "logits = tf.identity(r[0][2], name = 'logits')\n",
    "logits"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'gs://mesolitica-tpu-general/bigbird-summarization-small/model.ckpt-200000'"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# tf is already imported in the imports cell at the top of the notebook;\n",
    "# the duplicate `import tensorflow as tf` that used to live here was removed\n",
    "# so all imports stay in one place.\n",
    "ckpt_path = tf.train.latest_checkpoint('gs://mesolitica-tpu-general/bigbird-summarization-small')\n",
    "ckpt_path"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "sess = tf.Session()\n",
    "sess.run(tf.global_variables_initializer())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Restoring parameters from gs://mesolitica-tpu-general/bigbird-summarization-small/model.ckpt-200000\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Restoring parameters from gs://mesolitica-tpu-general/bigbird-summarization-small/model.ckpt-200000\n"
     ]
    }
   ],
   "source": [
    "saver = tf.train.Saver()\n",
    "saver.restore(sess, ckpt_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "import re\n",
    "from unidecode import unidecode\n",
    "\n",
    "def cleaning(string):\n",
    "    # Transliterate to ASCII, flatten newlines into spaces, then collapse\n",
    "    # any run of spaces down to one and trim the edges.\n",
    "    flattened = string.replace('\\n', ' ')\n",
    "    ascii_text = unidecode(flattened)\n",
    "    return re.sub(r'[ ]+', ' ', ascii_text).strip()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "string = \"\"\"\n",
    "KUALA LUMPUR: Hakim Mahkamah Tinggi, Mohd Nazlan Mohd Ghazali menyifatkan kes penyelewengan dana RM42 juta milik SRC International Sdn Bhd dihadapi Datuk Seri Najib Razak adalah kesalahan salah guna kedudukan, pecah amanah jenayah dan pengubahan wang haram yang paling teruk.\n",
    "\n",
    "Mohd Nazlan yang mensabitkan Najib terhadap kesemua tujuh tuduhan dan memerintahkan bekas Perdana Menteri itu dipenjara 12 tahun, dan didenda RM210 juta, berkata ia bukan sahaja disebabkan oleh alasan bagaimana jenayah itu dilakukan, malah kes berprofil tinggi berkenaan turut membabitkan sejumlah wang yang sangat besar.\n",
    "\n",
    "Melalui alasan penghakiman penuh setebal 801 muka surat itu, Mohd Nazlan, berkata kes terbabit mempunyai elemen yang memberikan kesan ke atas kepentingan awam kerana dana RM42 juta itu adalah milik Kementerian Kewangan (Diperbadankan) (MKD) yang berkemungkinan berasal daripada dana pencen Kumpulan Wang Persaraan (Diperbadankan) (KWAP) berjumlah RM4 bilion.\n",
    "\n",
    "\"Dan yang paling penting ia membabitkan individu yang pada ketika itu berada dalam pada tertinggi dalam kerajaan,\" katanya.\n",
    "\n",
    "Pada 28 Julai lalu, Mohd Nazlan memerintahkan Najib dipenjarakan 10 tahun masing-masing bagi tiga tuduhan pecah amanah wang RM42 juta milik SRC.\n",
    "\n",
    "Hakim turut memerintahkan Najib dipenjara 12 tahun dan denda RM210 juta (jika gagal bayar, lima tahun penjara) bagi tuduhan menyalahgunakan kedudukan.\n",
    "\n",
    "Bagi tuduhan pengubahan wang haram pula, Mohd Nazlan memerintahkan Najib dipenjara 10 tahun bagi setiap tuduhan.\n",
    "\n",
    "Sementara itu, Mohd Nazlan berkata, Najib selaku tertuduh tidak menunjukkan penyesalan, malah mempertahankan pembelaan beliau tidak mengetahui mengenai wang RM42 juta milik SRC itu dalam rayuannya bagi diringankan hukuman.\n",
    "\n",
    "\"Tetapi saya tidak boleh menafikan beliau adalah Perdana Menteri negara ini dan tidak boleh mempersoalkan sumbangannya untuk kebaikan dan kesejahteraan masyarakat dalam pelbagai cara kerana beliau adalah Perdana Menteri selama sembilan tahun.\n",
    "\n",
    "\"Sejarah politik akan terus diperdebatkan sama ada dari segi keseimbangan, beliau melakukan lebih banyak kebaikan daripada keburukan.\n",
    "\n",
    "\"Walau apa pun, ia adalah tidak selari dengan idea sesebuah pentadbiran negara yang bersih daripada rasuah yang tidak boleh bertolak ansur dengan sebarang penyalahgunaan kuasa,\" katanya.\n",
    "\n",
    "Mahkamah Rayuan menetapkan pada 15 Oktober ini bagi pengurusan kes rayuan Najib terhadap sabitan dan hukuman terhadapnya.\n",
    "\"\"\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "string2 = \"\"\"\n",
    "Gabungan parti Warisan, Pakatan Harapan, dan Upko hari ini mendedahkan calon-calon masing-masing untuk pilihan raya negeri Sabah, tetapi ketika pengumuman itu berlangsung, perwakilan PKR di dewan itu dilihat ‘gelisah’ seperti ‘tidak senang duduk’.\n",
    "\n",
    "Sekumpulan anggota PKR kemudian dilihat meninggalkan dewan di Pusat Konvensyen Antarabangsa Sabah di Kota Kinabalu selepas berbincang dengan ketua PKR Sabah Christina Liew.\n",
    "\n",
    "Semakan senarai-senarai calon berkenaan mendapati PKR hanya memperolehi separuh daripada jumlah kerusi yang diharapkan.\n",
    "\n",
    "Semalam, PKR Sabah mengumumkan akan bertanding di 14 kerusi tetapi ketika Presiden Warisan Shafie Apdal mengumumkan calon gabungan tersebut hari ini, PKR hanya diberikan tujuh kerusi untuk bertanding.\n",
    "\n",
    "Kerusi yang diberikan adalah Api-Api, Inanam, Tempasuk, Tamparuli, Matunggong, Klias, dan Sook.\n",
    "\n",
    "Klias dan Sook adalah dua kerusi yang diberikan kepada PKR, sementara lima kerusi selebihnya pernah ditandingi oleh PKR pada pilihan raya umum 2018.\n",
    "\n",
    "Dalam pengumuman PKR Sabah semalam, parti itu menjangkakan Warisan akan turut menyerahkan kerusi Kemabong, Membakut, dan Petagas kepada mereka.\n",
    "\n",
    "Walau bagaimanapun, Warisan menyerahkan kerusi Kemabong kepada Upko dan mengekalkan bertanding untuk kerusi Membakut dan Petagas.\n",
    "\n",
    "PKR juga menuntut empat daripada 13 kerusi baru yang diperkenalkan iaitu Segama, Limbahau, Sungai Manila, dan Pintasan tetapi Warisan membolot semua kerusi itu.\n",
    "\n",
    "Sebagai pertukaran untuk kerusi yang diintainya, PKR bersedia untuk menyerahkan kerusi Kadaimaian, Kuala Penyu, dan Karanaan. Namun, ini dijangka tidak akan berlaku memandangkan parti tersebut tidak berpuas hati dengan agihan kerusi seperti yang diharapkan itu.\n",
    "\n",
    "Selepas perwakilan dari PKR dan Liew keluar dari dewan tersebut, wartawan kemudian menyusuri Liew untuk mendapatkan penjelasannya.\n",
    "\n",
    "Walau bagaimanapun, Liew enggan memberikan sebarang komen dan berkata bahawa dia ingin ke tandas.\n",
    "\n",
    "Liew dan perwakilan PKR kemudian tidak kembali ke dalam dewan tersebut.\n",
    "\n",
    "Apabila calon pilihan raya yang diumumkan diminta naik ke atas pentas untuk sesi bergambar, Liew tidak kelihatan.\n",
    "\n",
    "Bilangan kerusi yang ditandingi oleh PKR kali ini hanya kurang satu kerusi daripada yang ditandingi parti itu pada PRU 2018.\n",
    "\n",
    "Dalam perkembangan berkaitan, DAP dan Amanah dikatakan tidak mempunyai sebarang masalah dengan kerusi yang diberikan untuk PRN Sabah.\n",
    "\n",
    "Sementara itu, Presiden Upko Madius Tangau enggan mengulas adakah dia berpuas hati dengan agihan kerusi tersebut. Madius kekal di majlis tersebut sehingga ia berakhir.\n",
    "\n",
    "Partinya diberikan 12 kerusi, iaitu lebih tujuh kerusi berbanding PRU lalu.\n",
    "\n",
    "DAP dan Amanah akan bertanding di bawah logo Warisan sementara PKR dan Upko akan menggunakan logo masing-masing.\n",
    "\n",
    "DAP akan bertanding di tujuh kerusi, jumlah yang sama seperti yang mereka tandingi pada PRU lalu, sementara Amanah diberi satu kerusi.\n",
    "\n",
    "Warisan akan bertanding sebanyak 54 kerusi.\n",
    "\n",
    "Perkembangan terbaru ini mungkin mencetuskan pergeseran di antara PKR dan Warisan. PKR boleh memilih untuk bertanding di lebih banyak kerusi daripada 14 yang dituntutnya manakala Warisan juga boleh bertanding di kerusi sekutunya.\n",
    "\n",
    "Barisan pemimpin tertinggi PKR dan Warisan hanya mempunyai dua hari sebelum hari penamaan calon pada Sabtu untuk mengurangkan pergeseran.\n",
    "\"\"\"\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "string3 = \"\"\"\n",
    "Penubuhan universiti sukan seperti diutarakan Ketua Unit Sukan Kementerian Pengajian Tinggi, Dr Pekan Ramli dan disokong Pakar Pembangunan Sukan dan Reakreasi Luar, Universiti Pendidikan Sultan Idris (UPSI), Prof Dr Md Amin Md Taaf seperti disiarkan akhbar ini, memberikan sinar harapan kepada kewujudan institusi sedemikian.\n",
    "\n",
    "Ia menjadi impian atlet negara untuk mengejar kejayaan dalam bidang sukan dan kecemerlangan dalam akademik untuk menjamin masa depan lebih baik apabila bersara daripada arena sukan kelak.\n",
    "\n",
    "Pelbagai pandangan, idea, kaedah, bukti dan cadangan dilontarkan pakar berikutan pentingnya universiti sukan yang akan memberi impak besar sama ada pada peringkat kebangsaan mahupun antarabangsa.\n",
    "\n",
    "Negara lain sudah lama meraih laba dengan kewujudan universiti sukan seperti China, Korea, Japan, Taiwan, India dan Vietnam. Mereka menghasilkan atlet universiti yang mempamerkan keputusan cemerlang pada peringkat tinggi seperti Sukan Olimpik, Kejohanan Dunia dan Sukan Asia.\n",
    "\n",
    "Justeru, kejayaan mereka perlu dijadikan rujukan demi memajukan sukan tanah air. Jika kita merujuk pendekatan Asia, kewujudan universiti sukan penting dan memberi kesan positif dalam melonjakkan prestasi sukan lebih optimum.\n",
    "\n",
    "Namun, jika kita melihat pendekatan Eropah, universiti sukan bukan antara organisasi atau institusi penting yang diberi perhatian dalam menyumbang kepada pemenang pingat.\n",
    "\n",
    "Antara isu dalam universiti sukan ialah kos tinggi, lokasi, prasarana sukan, pertindihan kursus dengan universiti sedia ada dan impak terhadap dunia sukan negara hingga mengundang persoalan kewajaran dan kerelevanan penubuhannya.\n",
    "\n",
    "Namun sebagai bekas atlet memanah negara dan Olympian (OLY) di Sukan Olimpik 2004 di Athens, Greece serta bekas pelajar Sekolah Sukan Bukit Jalil hingga berjaya dalam dunia akademik, saya mendapati terdapat beberapa faktor sering menjadi halangan dalam rutin harian mereka.\n",
    "\n",
    "Antaranya, faktor masa yang terpaksa bergegas menghadiri kuliah selepas tamat sesi latihan yang mengambil masa 15 hingga 20 minit dengan menunggang motosikal; kereta (20-30 minit) atau pengangkutan disediakan Majlis Sukan Negara (MSN) ke Universiti Putra Malaysia (UPM).\n",
    "\n",
    "Jika mereka menuntut di Universiti Teknologi MARA (UiTM) atau Universiti Malaya (UM), ia mungkin lebih lama.\n",
    "\n",
    "Walaupun di universiti tersedia dengan kemudahan kolej dan kemudahan sukan, mereka memilih pulang ke MSN untuk menjalani latihan bersama pasukan dan jurulatih di padang atau gelanggang latihan rasmi.\n",
    "\n",
    "Ini berlanjutan selagi bergelar atlet negara yang perlu memastikan prestasi sentiasa meningkat dari semasa ke semasa tanpa mengabaikan tugas sebagai pelajar.\n",
    "\n",
    "Alangkah baiknya jika sebahagian Sekolah Sukan Bukit Jalil itu sendiri dijadikan Kolej atau Universiti Sukan Malaysia kerana lengkap dari segi kemudahan prasarana sukannya dan proses pengajaran dan pembelajaran (PdP) dalam bidang Sains Sukan, Kejurulatihan, Pendidikan Jasmani dan setaraf dengannya.\n",
    "\n",
    "Pengambilan setiap semester pula hanya terhad kepada atlet berstatus kebangsaan dan antarabangsa sahaja supaya hasrat melahirkan lebih ramai atlet bertaraf Olimpik mudah direalisasikan.\n",
    "\n",
    "Contohnya, bekas atlet lompat bergalah negara, Roslinda Samsu yang juga pemenang pingat perak Sukan Asia Doha 2006 dan Penerima Anugerah Khas Majlis Anugerah Sukan KPT 2012, terpaksa mengambil masa lebih kurang sembilan tahun untuk menamatkan ijazah Sarjana Muda Pendidikan Jasmani di UPM sepanjang 14 tahun terbabit dalam sukan olahraga.\n",
    "\n",
    "Sepanjang tempoh bergelar atlet kebangsaan dan mahasiswa, beliau juga memenangi pingat Emas Sukan SEA empat siri berturut-turut pada 2005, 2007, 2009 dan 2011.\n",
    "\n",
    "Begitu juga atlet kebangsaan seperti Leong Mun Yee (UPM); Pandalela Renong (UM); Bryan Nickson Lomas (UM); Cheng Chu Sian (UPM); Marbawi Sulaiman (UiTM) dan Norasheela Khalid (UPM).\n",
    "\n",
    "Jika disenaraikan, mungkin lebih ramai lagi. Namun, pernah terlintas di fikiran mengapa hanya atlet dari sukan terjun yang dapat memenangi pingat di Sukan Olimpik? Bagaimana dengan atlet lain yang juga layak secara merit? Apakah kekangan atau masalah dihadapi sebagai atlet dan mahasiswa?\n",
    "\n",
    "Adakah kewujudan universiti sukan akan memberi impak besar kepada kemajuan sukan negara? Jika dirancang dan diatur dengan cekap dan sistematik, ia perkara tidak mustahil dicapai.\n",
    "\"\"\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "pad_sequences = tf.keras.preprocessing.sequence.pad_sequences"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "encoded = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(cleaning(string2)))\n",
    "s = pad_sequences([encoded], padding='post', maxlen = 1536)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "CPU times: user 37.3 s, sys: 856 ms, total: 38.2 s\n",
      "Wall time: 2.99 s\n"
     ]
    }
   ],
   "source": [
    "%%time\n",
    "\n",
    "l = sess.run(logits, feed_dict = {X: s, top_p: 0.0, temperature: 0.0})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [],
   "source": [
    "def merge_ids_to_string(tokenizer, ids):\n",
    "    # Decode a sequence of WordPiece ids back into a readable string:\n",
    "    # pieces prefixed with '##' are glued onto the preceding token, and\n",
    "    # special tokens ([CLS]/[SEP]/[PAD]) are dropped before joining.\n",
    "    tokens = tokenizer.convert_ids_to_tokens(ids)\n",
    "    new_tokens = []\n",
    "    n_tokens = len(tokens)\n",
    "    i = 0\n",
    "    while i < n_tokens:\n",
    "        current_token = tokens[i]\n",
    "        # `and new_tokens` guards the pop(): the original crashed with\n",
    "        # IndexError when the very first token started with '##'.\n",
    "        if current_token.startswith('##') and new_tokens:\n",
    "            merged_token = new_tokens.pop()\n",
    "            # `i < n_tokens` guards the lookup: the original read past the\n",
    "            # end of `tokens` when the sequence finished on a '##' piece.\n",
    "            while i < n_tokens and tokens[i].startswith('##'):\n",
    "                merged_token = merged_token + tokens[i].replace('##', '')\n",
    "                i = i + 1\n",
    "            new_tokens.append(merged_token)\n",
    "\n",
    "        else:\n",
    "            new_tokens.append(current_token)\n",
    "            i = i + 1\n",
    "\n",
    "    words = [\n",
    "        i\n",
    "        for i in new_tokens\n",
    "        if i not in ['[CLS]', '[SEP]', '[PAD]']\n",
    "    ]\n",
    "    return ' '.join(words)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'Dikenali sebagai \" Sukan \" , Sukan UPM telah menjadi sejenisnya sukan universiti yang menyediakan pelajar dan fakulti . Negara telah mengambil isu dengan Sukanonton , pelajar dan atlet untuk soalan - soalan penyelidikan . Negara telah mengadakan acara sukan selama lebih 20 tahun dan telah menjadi satu - satunya atlet yang dianugerahkan pingat dalam setiap pingat .'"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "merge_ids_to_string(tokenizer, l[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "CPU times: user 28.9 s, sys: 789 ms, total: 29.7 s\n",
      "Wall time: 2.26 s\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "'Dikenali sebagai \" government winds \" , Najib kekal sebagai Perdana Menteri yang paling teruk dilanda jenayah . Beliau telah dua kali cuba untuk memenangi anugerah kepada keluarga bekas pendakwa raya . Beliau menghadapi 10 tuduhan penyelewengan dana dan pecah rumah .'"
      ]
     },
     "execution_count": 40,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "%%time\n",
    "encoded = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(cleaning(string)))\n",
    "s = pad_sequences([encoded], padding='post', maxlen = 1536)\n",
    "l = sess.run(logits, feed_dict = {X: s, top_p: 0.0, temperature: 0.0})\n",
    "merge_ids_to_string(tokenizer, l[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "CPU times: user 37.2 s, sys: 1.11 s, total: 38.3 s\n",
      "Wall time: 2.97 s\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "'Dikenali sebagai \" Sukan \" , Sukan UPM telah menjadi sejenisnya sukan universiti yang menyediakan pelajar dan fakulti . Negara telah mengambil isu dengan Sukanonton , pelajar dan atlet untuk soalan - soalan penyelidikan . Negara telah mengadakan acara sukan selama lebih 20 tahun dan telah menjadi satu - satunya atlet yang dianugerahkan pingat dalam setiap pingat .'"
      ]
     },
     "execution_count": 41,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "%%time\n",
    "encoded = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(cleaning(string3)))\n",
    "s = pad_sequences([encoded], padding='post', maxlen = 1536)\n",
    "l = sess.run(logits, feed_dict = {X: s, top_p: 0.0, temperature: 0.0})\n",
    "merge_ids_to_string(tokenizer, l[0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'output/model.ckpt'"
      ]
     },
     "execution_count": 42,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "saver = tf.train.Saver(tf.trainable_variables())\n",
    "saver.save(sess, 'output/model.ckpt')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['top_p',\n",
       " 'temperature',\n",
       " 'pegasus/embeddings/word_embeddings',\n",
       " 'pegasus/embeddings/position_embeddings',\n",
       " 'Placeholder',\n",
       " 'pegasus/encoder/layer_0/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_0/attention/self/query/kernel',\n",
       " 'pegasus/encoder/layer_0/attention/self/key/kernel',\n",
       " 'pegasus/encoder/layer_0/attention/self/value/kernel',\n",
       " 'pegasus/encoder/layer_0/attention/self/Softmax',\n",
       " 'pegasus/encoder/layer_0/attention/self/Softmax_1',\n",
       " 'pegasus/encoder/layer_0/attention/self/Softmax_2',\n",
       " 'pegasus/encoder/layer_0/attention/self/Softmax_3',\n",
       " 'pegasus/encoder/layer_0/attention/self/Softmax_4',\n",
       " 'pegasus/encoder/layer_0/attention/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_0/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_0/intermediate/dense/kernel',\n",
       " 'pegasus/encoder/layer_0/intermediate/dense/bias',\n",
       " 'pegasus/encoder/layer_0/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_0/output/dense/bias',\n",
       " 'pegasus/encoder/layer_1/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_1/attention/self/query/kernel',\n",
       " 'pegasus/encoder/layer_1/attention/self/key/kernel',\n",
       " 'pegasus/encoder/layer_1/attention/self/value/kernel',\n",
       " 'pegasus/encoder/layer_1/attention/self/Softmax',\n",
       " 'pegasus/encoder/layer_1/attention/self/Softmax_1',\n",
       " 'pegasus/encoder/layer_1/attention/self/Softmax_2',\n",
       " 'pegasus/encoder/layer_1/attention/self/Softmax_3',\n",
       " 'pegasus/encoder/layer_1/attention/self/Softmax_4',\n",
       " 'pegasus/encoder/layer_1/attention/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_1/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_1/intermediate/dense/kernel',\n",
       " 'pegasus/encoder/layer_1/intermediate/dense/bias',\n",
       " 'pegasus/encoder/layer_1/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_1/output/dense/bias',\n",
       " 'pegasus/encoder/layer_2/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_2/attention/self/query/kernel',\n",
       " 'pegasus/encoder/layer_2/attention/self/key/kernel',\n",
       " 'pegasus/encoder/layer_2/attention/self/value/kernel',\n",
       " 'pegasus/encoder/layer_2/attention/self/Softmax',\n",
       " 'pegasus/encoder/layer_2/attention/self/Softmax_1',\n",
       " 'pegasus/encoder/layer_2/attention/self/Softmax_2',\n",
       " 'pegasus/encoder/layer_2/attention/self/Softmax_3',\n",
       " 'pegasus/encoder/layer_2/attention/self/Softmax_4',\n",
       " 'pegasus/encoder/layer_2/attention/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_2/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_2/intermediate/dense/kernel',\n",
       " 'pegasus/encoder/layer_2/intermediate/dense/bias',\n",
       " 'pegasus/encoder/layer_2/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_2/output/dense/bias',\n",
       " 'pegasus/encoder/layer_3/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_3/attention/self/query/kernel',\n",
       " 'pegasus/encoder/layer_3/attention/self/key/kernel',\n",
       " 'pegasus/encoder/layer_3/attention/self/value/kernel',\n",
       " 'pegasus/encoder/layer_3/attention/self/Softmax',\n",
       " 'pegasus/encoder/layer_3/attention/self/Softmax_1',\n",
       " 'pegasus/encoder/layer_3/attention/self/Softmax_2',\n",
       " 'pegasus/encoder/layer_3/attention/self/Softmax_3',\n",
       " 'pegasus/encoder/layer_3/attention/self/Softmax_4',\n",
       " 'pegasus/encoder/layer_3/attention/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_3/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_3/intermediate/dense/kernel',\n",
       " 'pegasus/encoder/layer_3/intermediate/dense/bias',\n",
       " 'pegasus/encoder/layer_3/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_3/output/dense/bias',\n",
       " 'pegasus/encoder/layer_4/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_4/attention/self/query/kernel',\n",
       " 'pegasus/encoder/layer_4/attention/self/key/kernel',\n",
       " 'pegasus/encoder/layer_4/attention/self/value/kernel',\n",
       " 'pegasus/encoder/layer_4/attention/self/Softmax',\n",
       " 'pegasus/encoder/layer_4/attention/self/Softmax_1',\n",
       " 'pegasus/encoder/layer_4/attention/self/Softmax_2',\n",
       " 'pegasus/encoder/layer_4/attention/self/Softmax_3',\n",
       " 'pegasus/encoder/layer_4/attention/self/Softmax_4',\n",
       " 'pegasus/encoder/layer_4/attention/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_4/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_4/intermediate/dense/kernel',\n",
       " 'pegasus/encoder/layer_4/intermediate/dense/bias',\n",
       " 'pegasus/encoder/layer_4/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_4/output/dense/bias',\n",
       " 'pegasus/encoder/layer_5/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_5/attention/self/query/kernel',\n",
       " 'pegasus/encoder/layer_5/attention/self/key/kernel',\n",
       " 'pegasus/encoder/layer_5/attention/self/value/kernel',\n",
       " 'pegasus/encoder/layer_5/attention/self/Softmax',\n",
       " 'pegasus/encoder/layer_5/attention/self/Softmax_1',\n",
       " 'pegasus/encoder/layer_5/attention/self/Softmax_2',\n",
       " 'pegasus/encoder/layer_5/attention/self/Softmax_3',\n",
       " 'pegasus/encoder/layer_5/attention/self/Softmax_4',\n",
       " 'pegasus/encoder/layer_5/attention/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_5/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/encoder/layer_5/intermediate/dense/kernel',\n",
       " 'pegasus/encoder/layer_5/intermediate/dense/bias',\n",
       " 'pegasus/encoder/layer_5/output/dense/kernel',\n",
       " 'pegasus/encoder/layer_5/output/dense/bias',\n",
       " 'pegasus/encoder/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_0/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_0/attention/self/query/kernel',\n",
       " 'pegasus/decoder/layer_0/attention/self/key/kernel',\n",
       " 'pegasus/decoder/layer_0/attention/self/value/kernel',\n",
       " 'pegasus/while/decoder/layer_0/attention/self/Softmax',\n",
       " 'pegasus/decoder/layer_0/attention/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_0/attention/encdec/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_0/attention/encdec/query/kernel',\n",
       " 'pegasus/decoder/layer_0/attention/encdec/key/kernel',\n",
       " 'pegasus/decoder/layer_0/attention/encdec/value/kernel',\n",
       " 'pegasus/decoder/layer_0/attention/encdec_output/dense/kernel',\n",
       " 'pegasus/decoder/layer_0/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_0/intermediate/dense/kernel',\n",
       " 'pegasus/decoder/layer_0/intermediate/dense/bias',\n",
       " 'pegasus/decoder/layer_0/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_0/output/dense/bias',\n",
       " 'pegasus/decoder/layer_1/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_1/attention/self/query/kernel',\n",
       " 'pegasus/decoder/layer_1/attention/self/key/kernel',\n",
       " 'pegasus/decoder/layer_1/attention/self/value/kernel',\n",
       " 'pegasus/while/decoder/layer_1/attention/self/Softmax',\n",
       " 'pegasus/decoder/layer_1/attention/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_1/attention/encdec/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_1/attention/encdec/query/kernel',\n",
       " 'pegasus/decoder/layer_1/attention/encdec/key/kernel',\n",
       " 'pegasus/decoder/layer_1/attention/encdec/value/kernel',\n",
       " 'pegasus/decoder/layer_1/attention/encdec_output/dense/kernel',\n",
       " 'pegasus/decoder/layer_1/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_1/intermediate/dense/kernel',\n",
       " 'pegasus/decoder/layer_1/intermediate/dense/bias',\n",
       " 'pegasus/decoder/layer_1/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_1/output/dense/bias',\n",
       " 'pegasus/decoder/layer_2/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_2/attention/self/query/kernel',\n",
       " 'pegasus/decoder/layer_2/attention/self/key/kernel',\n",
       " 'pegasus/decoder/layer_2/attention/self/value/kernel',\n",
       " 'pegasus/while/decoder/layer_2/attention/self/Softmax',\n",
       " 'pegasus/decoder/layer_2/attention/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_2/attention/encdec/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_2/attention/encdec/query/kernel',\n",
       " 'pegasus/decoder/layer_2/attention/encdec/key/kernel',\n",
       " 'pegasus/decoder/layer_2/attention/encdec/value/kernel',\n",
       " 'pegasus/decoder/layer_2/attention/encdec_output/dense/kernel',\n",
       " 'pegasus/decoder/layer_2/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_2/intermediate/dense/kernel',\n",
       " 'pegasus/decoder/layer_2/intermediate/dense/bias',\n",
       " 'pegasus/decoder/layer_2/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_2/output/dense/bias',\n",
       " 'pegasus/decoder/layer_3/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_3/attention/self/query/kernel',\n",
       " 'pegasus/decoder/layer_3/attention/self/key/kernel',\n",
       " 'pegasus/decoder/layer_3/attention/self/value/kernel',\n",
       " 'pegasus/while/decoder/layer_3/attention/self/Softmax',\n",
       " 'pegasus/decoder/layer_3/attention/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_3/attention/encdec/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_3/attention/encdec/query/kernel',\n",
       " 'pegasus/decoder/layer_3/attention/encdec/key/kernel',\n",
       " 'pegasus/decoder/layer_3/attention/encdec/value/kernel',\n",
       " 'pegasus/decoder/layer_3/attention/encdec_output/dense/kernel',\n",
       " 'pegasus/decoder/layer_3/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_3/intermediate/dense/kernel',\n",
       " 'pegasus/decoder/layer_3/intermediate/dense/bias',\n",
       " 'pegasus/decoder/layer_3/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_3/output/dense/bias',\n",
       " 'pegasus/decoder/layer_4/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_4/attention/self/query/kernel',\n",
       " 'pegasus/decoder/layer_4/attention/self/key/kernel',\n",
       " 'pegasus/decoder/layer_4/attention/self/value/kernel',\n",
       " 'pegasus/while/decoder/layer_4/attention/self/Softmax',\n",
       " 'pegasus/decoder/layer_4/attention/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_4/attention/encdec/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_4/attention/encdec/query/kernel',\n",
       " 'pegasus/decoder/layer_4/attention/encdec/key/kernel',\n",
       " 'pegasus/decoder/layer_4/attention/encdec/value/kernel',\n",
       " 'pegasus/decoder/layer_4/attention/encdec_output/dense/kernel',\n",
       " 'pegasus/decoder/layer_4/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_4/intermediate/dense/kernel',\n",
       " 'pegasus/decoder/layer_4/intermediate/dense/bias',\n",
       " 'pegasus/decoder/layer_4/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_4/output/dense/bias',\n",
       " 'pegasus/decoder/layer_5/attention/self/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_5/attention/self/query/kernel',\n",
       " 'pegasus/decoder/layer_5/attention/self/key/kernel',\n",
       " 'pegasus/decoder/layer_5/attention/self/value/kernel',\n",
       " 'pegasus/while/decoder/layer_5/attention/self/Softmax',\n",
       " 'pegasus/decoder/layer_5/attention/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_5/attention/encdec/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_5/attention/encdec/query/kernel',\n",
       " 'pegasus/decoder/layer_5/attention/encdec/key/kernel',\n",
       " 'pegasus/decoder/layer_5/attention/encdec/value/kernel',\n",
       " 'pegasus/decoder/layer_5/attention/encdec_output/dense/kernel',\n",
       " 'pegasus/decoder/layer_5/intermediate/LayerNorm/gamma',\n",
       " 'pegasus/decoder/layer_5/intermediate/dense/kernel',\n",
       " 'pegasus/decoder/layer_5/intermediate/dense/bias',\n",
       " 'pegasus/decoder/layer_5/output/dense/kernel',\n",
       " 'pegasus/decoder/layer_5/output/dense/bias',\n",
       " 'pegasus/decoder/LayerNorm/gamma',\n",
       " 'pegasus/while/decoder/layer_0/attention/self/Softmax_1',\n",
       " 'pegasus/while/decoder/layer_1/attention/self/Softmax_1',\n",
       " 'pegasus/while/decoder/layer_2/attention/self/Softmax_1',\n",
       " 'pegasus/while/decoder/layer_3/attention/self/Softmax_1',\n",
       " 'pegasus/while/decoder/layer_4/attention/self/Softmax_1',\n",
       " 'pegasus/while/decoder/layer_5/attention/self/Softmax_1',\n",
       " 'pegasus/logits',\n",
       " 'logits']"
      ]
     },
     "execution_count": 43,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Collect the graph node names to keep when freezing: variables, feed\n",
    "# placeholders, the sampling controls (top_p / temperature) and the\n",
    "# logits / attention-softmax tensors, while dropping optimizer state.\n",
    "node_names = []\n",
    "for n in tf.get_default_graph().as_graph_def().node:\n",
    "    keep = (\n",
    "        'Variable' in n.op\n",
    "        or 'Placeholder' in n.name\n",
    "        or 'top_p' in n.name\n",
    "        or 'temperature' in n.name\n",
    "        or 'logits' in n.name\n",
    "        or 'alphas' in n.name\n",
    "        or 'self/Softmax' in n.name\n",
    "    )\n",
    "    skip = (\n",
    "        'adam' in n.name\n",
    "        or 'beta' in n.name\n",
    "        or 'global_step' in n.name\n",
    "        or 'gradients' in n.name\n",
    "    )\n",
    "    if keep and not skip:\n",
    "        node_names.append(n.name)\n",
    "strings = ','.join(node_names)\n",
    "strings.split(',')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {},
   "outputs": [],
   "source": [
    "def freeze_graph(model_dir, output_node_names):\n",
    "    \"\"\"Freeze the latest checkpoint in `model_dir` into one GraphDef.\n",
    "\n",
    "    Restores the checkpoint, converts every variable reachable from\n",
    "    `output_node_names` (a comma-separated string of node names) into\n",
    "    constants, and writes the result to `<model_dir>/frozen_model.pb`.\n",
    "\n",
    "    Raises:\n",
    "        AssertionError: if `model_dir` does not exist or contains no\n",
    "            checkpoint.\n",
    "    \"\"\"\n",
    "    if not tf.gfile.Exists(model_dir):\n",
    "        raise AssertionError(\n",
    "            \"Export directory doesn't exists. Please specify an export \"\n",
    "            'directory: %s' % model_dir\n",
    "        )\n",
    "\n",
    "    checkpoint = tf.train.get_checkpoint_state(model_dir)\n",
    "    # get_checkpoint_state returns None when no checkpoint file is found;\n",
    "    # fail with a clear message instead of an AttributeError below.\n",
    "    if checkpoint is None or not checkpoint.model_checkpoint_path:\n",
    "        raise AssertionError('No checkpoint found in directory: %s' % model_dir)\n",
    "    input_checkpoint = checkpoint.model_checkpoint_path\n",
    "\n",
    "    absolute_model_dir = '/'.join(input_checkpoint.split('/')[:-1])\n",
    "    output_graph = absolute_model_dir + '/frozen_model.pb'\n",
    "    with tf.Session(graph = tf.Graph()) as sess:\n",
    "        # clear_devices drops device placements recorded at training time\n",
    "        # so the frozen graph can be loaded anywhere.\n",
    "        saver = tf.train.import_meta_graph(\n",
    "            input_checkpoint + '.meta', clear_devices = True\n",
    "        )\n",
    "        saver.restore(sess, input_checkpoint)\n",
    "        output_graph_def = tf.graph_util.convert_variables_to_constants(\n",
    "            sess,\n",
    "            tf.get_default_graph().as_graph_def(),\n",
    "            output_node_names.split(','),\n",
    "        )\n",
    "        with tf.gfile.GFile(output_graph, 'wb') as f:\n",
    "            f.write(output_graph_def.SerializeToString())\n",
    "        print('%d ops in the final graph.' % len(output_graph_def.node))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Restoring parameters from output/model.ckpt\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Restoring parameters from output/model.ckpt\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From <ipython-input-44-9a7215a4e58a>:23: convert_variables_to_constants (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use `tf.compat.v1.graph_util.convert_variables_to_constants`\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From <ipython-input-44-9a7215a4e58a>:23: convert_variables_to_constants (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use `tf.compat.v1.graph_util.convert_variables_to_constants`\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/husein/.local/lib/python3.6/site-packages/tensorflow_core/python/framework/graph_util_impl.py:277: extract_sub_graph (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use `tf.compat.v1.graph_util.extract_sub_graph`\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From /home/husein/.local/lib/python3.6/site-packages/tensorflow_core/python/framework/graph_util_impl.py:277: extract_sub_graph (from tensorflow.python.framework.graph_util_impl) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use `tf.compat.v1.graph_util.extract_sub_graph`\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Froze 186 variables.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Froze 186 variables.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Converted 186 variables to const ops.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:tensorflow:Converted 186 variables to const ops.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "12524 ops in the final graph.\n"
     ]
    }
   ],
   "source": [
    "# Freeze the latest checkpoint under ./output, keeping only the node\n",
    "# names collected above; writes output/frozen_model.pb.\n",
    "freeze_graph('output', strings)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.tools.graph_transforms import TransformGraph"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {},
   "outputs": [],
   "source": [
    "# GraphDef rewrites applied by TransformGraph, in order; quantize_weights\n",
    "# converts float weights to 8-bit with a fallback clipping range.\n",
    "transforms = [\n",
    "    'add_default_attributes',\n",
    "    'remove_nodes(op=Identity, op=CheckNumerics, op=Dropout)',\n",
    "    'fold_batch_norms',\n",
    "    'fold_old_batch_norms',\n",
    "    'quantize_weights(fallback_min=-10, fallback_max=10)',\n",
    "    'strip_unused_nodes',\n",
    "    'sort_by_execution_order',\n",
    "]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From <ipython-input-48-f9c2d7850f78>:4: FastGFile.__init__ (from tensorflow.python.platform.gfile) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use tf.gfile.GFile.\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:From <ipython-input-48-f9c2d7850f78>:4: FastGFile.__init__ (from tensorflow.python.platform.gfile) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use tf.gfile.GFile.\n"
     ]
    }
   ],
   "source": [
    "pb = 'output/frozen_model.pb'\n",
    "\n",
    "# FastGFile is deprecated (see the warning this cell used to emit);\n",
    "# tf.gfile.GFile is the supported replacement with the same behavior.\n",
    "input_graph_def = tf.GraphDef()\n",
    "with tf.gfile.GFile(pb, 'rb') as f:\n",
    "    input_graph_def.ParseFromString(f.read())\n",
    "\n",
    "# Quantize the frozen graph between the feed placeholders and 'logits'.\n",
    "inputs = ['Placeholder', 'top_p', 'temperature']\n",
    "transformed_graph_def = TransformGraph(input_graph_def,\n",
    "                                       inputs,\n",
    "                                       ['logits'], transforms)\n",
    "\n",
    "with tf.gfile.GFile(f'{pb}.quantized', 'wb') as f:\n",
    "    f.write(transformed_graph_def.SerializeToString())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {},
   "outputs": [],
   "source": [
    "def load_graph(frozen_graph_filename, **kwargs):\n",
    "    \"\"\"Load a frozen GraphDef file and import it into a fresh tf.Graph.\n",
    "\n",
    "    Imported tensors live under the default 'import/' name scope, e.g.\n",
    "    graph.get_tensor_by_name('import/logits:0').\n",
    "    \"\"\"\n",
    "    with tf.gfile.GFile(frozen_graph_filename, 'rb') as f:\n",
    "        graph_def = tf.GraphDef()\n",
    "        graph_def.ParseFromString(f.read())\n",
    "\n",
    "    # https://github.com/onnx/tensorflow-onnx/issues/77#issuecomment-445066091\n",
    "    # to fix import T5: rewrite stateful variable-assignment ops left in\n",
    "    # the frozen graph into stateless equivalents for inference.\n",
    "    for node in graph_def.node:\n",
    "        if node.op == 'RefSwitch':\n",
    "            node.op = 'Switch'\n",
    "            # BUGFIX: `xrange` is Python 2 only and raises NameError on\n",
    "            # Python 3 (this notebook runs 3.6); use `range` instead.\n",
    "            for index in range(len(node.input)):\n",
    "                if 'moving_' in node.input[index]:\n",
    "                    node.input[index] = node.input[index] + '/read'\n",
    "        elif node.op == 'AssignSub':\n",
    "            node.op = 'Sub'\n",
    "            if 'use_locking' in node.attr:\n",
    "                del node.attr['use_locking']\n",
    "        elif node.op == 'AssignAdd':\n",
    "            node.op = 'Add'\n",
    "            if 'use_locking' in node.attr:\n",
    "                del node.attr['use_locking']\n",
    "        elif node.op == 'Assign':\n",
    "            node.op = 'Identity'\n",
    "            if 'use_locking' in node.attr:\n",
    "                del node.attr['use_locking']\n",
    "            if 'validate_shape' in node.attr:\n",
    "                del node.attr['validate_shape']\n",
    "            if len(node.input) == 2:\n",
    "                # Identity takes one input: forward the assigned value.\n",
    "                node.input[0] = node.input[1]\n",
    "                del node.input[1]\n",
    "\n",
    "    with tf.Graph().as_default() as graph:\n",
    "        tf.import_graph_def(graph_def)\n",
    "    return graph"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 50,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sanity-check the frozen graph: fetch the feed and output tensors and\n",
    "# open an interactive session over it.\n",
    "g = load_graph('output/frozen_model.pb')\n",
    "x, top_p, temperature, logits = [\n",
    "    g.get_tensor_by_name('import/%s:0' % name)\n",
    "    for name in ('Placeholder', 'top_p', 'temperature', 'logits')\n",
    "]\n",
    "test_sess = tf.InteractiveSession(graph = g)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "CPU times: user 45.5 s, sys: 4.43 s, total: 49.9 s\n",
      "Wall time: 5.82 s\n"
     ]
    }
   ],
   "source": [
    "%%time\n",
    "# NOTE(review): `s` comes from a cell outside this excerpt -- presumably\n",
    "# the tokenized input ids; top_p=0.0 / temperature=0.0 looks like greedy\n",
    "# decoding. Confirm against the cell that builds `s`.\n",
    "l = test_sess.run(logits, feed_dict = {x: s, top_p: 0.0, temperature: 0.0})"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.9"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
