{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "24ae5547",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Run the whole conversion on CPU: hide every GPU from TensorFlow/PyTorch.\n",
    "import os\n",
    "\n",
    "os.environ['CUDA_VISIBLE_DEVICES'] = ''\n",
    "# No-op while GPUs are hidden, but harmless if this cell is re-run with GPUs visible.\n",
    "os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "3395870c",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2022-07-11 18:59:16.217758: E tensorflow/stream_executor/cuda/cuda_driver.cc:271] failed call to cuInit: CUDA_ERROR_NO_DEVICE: no CUDA-capable device is detected\n",
      "2022-07-11 18:59:16.217778: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:169] retrieving CUDA diagnostic information for host: huseincomel-desktop\n",
      "2022-07-11 18:59:16.217782: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:176] hostname: huseincomel-desktop\n",
      "2022-07-11 18:59:16.217817: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:200] libcuda reported version is: Not found: was unable to find libcuda.so DSO loaded into this program\n",
      "2022-07-11 18:59:16.217836: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:204] kernel reported version is: 470.129.6\n",
      "2022-07-11 18:59:16.218135: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 AVX512F FMA\n",
      "To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n",
      "/home/ubuntu/.local/lib/python3.8/site-packages/apex/pyprof/__init__.py:5: FutureWarning: pyprof will be removed by the end of June, 2022\n",
      "  warnings.warn(\"pyprof will be removed by the end of June, 2022\", FutureWarning)\n"
     ]
    }
   ],
   "source": [
    "# HuggingFace T5 classes (PyTorch + TF variants) plus the helper that loads\n",
    "# a Mesh-TensorFlow T5 checkpoint into a PyTorch T5 model.\n",
    "from transformers import T5Config, TFT5Model, T5Model, load_tf_weights_in_t5, T5Tokenizer\n",
    "from transformers import T5ForConditionalGeneration, TFT5ForConditionalGeneration"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "06db4241",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Start from a clean output directory so stale files from a previous run\n",
    "# cannot leak into the converted checkpoint.\n",
    "!rm -rf temp"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "a541b0ab",
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "\n",
    "# Directory that will hold the converted HuggingFace checkpoint.\n",
    "out = 'temp'\n",
    "os.makedirs(out, exist_ok=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "48753c21",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Reuse the t5-small architecture config from the hub; only the weights\n",
    "# come from the local TF checkpoint loaded below.\n",
    "config = T5Config.from_pretrained('malay-huggingface/t5-small-bahasa-cased')\n",
    "config.save_pretrained(out)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "50a47b1d",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "checkpoint\r\n",
      "events.out.tfevents.1657262576.huseincomel-desktop\r\n",
      "events.out.tfevents.1657273096.huseincomel-desktop\r\n",
      "events.out.tfevents.1657378269.huseincomel-desktop\r\n",
      "events.out.tfevents.1657386478.huseincomel-desktop\r\n",
      "events.out.tfevents.1657527614.huseincomel-desktop\r\n",
      "graph.pbtxt\r\n",
      "model.ckpt-1023200.data-00000-of-00002\r\n",
      "model.ckpt-1023200.data-00001-of-00002\r\n",
      "model.ckpt-1023200.index\r\n",
      "model.ckpt-1023200.meta\r\n",
      "model.ckpt-1048200.data-00000-of-00002\r\n",
      "model.ckpt-1048200.data-00001-of-00002\r\n",
      "model.ckpt-1048200.index\r\n",
      "model.ckpt-1048200.meta\r\n",
      "model.ckpt-1074200.data-00000-of-00002\r\n",
      "model.ckpt-1074200.data-00001-of-00002\r\n",
      "model.ckpt-1074200.index\r\n",
      "model.ckpt-1074200.meta\r\n",
      "model.ckpt-1099200.data-00000-of-00002\r\n",
      "model.ckpt-1099200.data-00001-of-00002\r\n",
      "model.ckpt-1099200.index\r\n",
      "model.ckpt-1099200.meta\r\n",
      "model.ckpt-1125200.data-00000-of-00002\r\n",
      "model.ckpt-1125200.data-00001-of-00002\r\n",
      "model.ckpt-1125200.index\r\n",
      "model.ckpt-1125200.meta\r\n",
      "model.ckpt-1150200.data-00000-of-00002\r\n",
      "model.ckpt-1150200.data-00001-of-00002\r\n",
      "model.ckpt-1150200.index\r\n",
      "model.ckpt-1150200.meta\r\n",
      "model.ckpt-1211200.data-00000-of-00002\r\n",
      "model.ckpt-1211200.data-00001-of-00002\r\n",
      "model.ckpt-1211200.index\r\n",
      "model.ckpt-1211200.meta\r\n",
      "model.ckpt-1233200.data-00000-of-00002\r\n",
      "model.ckpt-1233200.data-00001-of-00002\r\n",
      "model.ckpt-1233200.index\r\n",
      "model.ckpt-1233200.meta\r\n",
      "model.ckpt-1255200.data-00000-of-00002\r\n",
      "model.ckpt-1255200.data-00001-of-00002\r\n",
      "model.ckpt-1255200.index\r\n",
      "model.ckpt-1255200.meta\r\n",
      "model.ckpt-1277200.data-00000-of-00002\r\n",
      "model.ckpt-1277200.data-00001-of-00002\r\n",
      "model.ckpt-1277200.index\r\n",
      "model.ckpt-1277200.meta\r\n",
      "model.ckpt-1299200.data-00000-of-00002\r\n",
      "model.ckpt-1299200.data-00001-of-00002\r\n",
      "model.ckpt-1299200.index\r\n",
      "model.ckpt-1299200.meta\r\n",
      "model.ckpt-1321200.data-00000-of-00002\r\n",
      "model.ckpt-1321200.data-00001-of-00002\r\n",
      "model.ckpt-1321200.index\r\n",
      "model.ckpt-1321200.meta\r\n",
      "model.ckpt-1343200.data-00000-of-00002\r\n",
      "model.ckpt-1343200.data-00001-of-00002\r\n",
      "model.ckpt-1343200.index\r\n",
      "model.ckpt-1343200.meta\r\n",
      "model.ckpt-1365200.data-00000-of-00002\r\n",
      "model.ckpt-1365200.data-00001-of-00002\r\n",
      "model.ckpt-1365200.index\r\n",
      "model.ckpt-1365200.meta\r\n",
      "model.ckpt-1387200.data-00000-of-00002\r\n",
      "model.ckpt-1387200.data-00001-of-00002\r\n",
      "model.ckpt-1387200.index\r\n",
      "model.ckpt-1387200.meta\r\n",
      "model.ckpt-1409200.data-00000-of-00002\r\n",
      "model.ckpt-1409200.data-00001-of-00002\r\n",
      "model.ckpt-1409200.index\r\n",
      "model.ckpt-1409200.meta\r\n",
      "model.ckpt-1432200.data-00000-of-00002\r\n",
      "model.ckpt-1432200.data-00001-of-00002\r\n",
      "model.ckpt-1432200.index\r\n",
      "model.ckpt-1432200.meta\r\n",
      "model.ckpt-1455200.data-00000-of-00002\r\n",
      "model.ckpt-1455200.data-00001-of-00002\r\n",
      "model.ckpt-1455200.index\r\n",
      "model.ckpt-1455200.meta\r\n",
      "model.ckpt-1477200.data-00000-of-00002\r\n",
      "model.ckpt-1477200.data-00001-of-00002\r\n",
      "model.ckpt-1477200.index\r\n",
      "model.ckpt-1477200.meta\r\n",
      "model.ckpt-1500200.data-00000-of-00002\r\n",
      "model.ckpt-1500200.data-00001-of-00002\r\n",
      "model.ckpt-1500200.index\r\n",
      "model.ckpt-1500200.meta\r\n",
      "model.ckpt-1523200.data-00000-of-00002\r\n",
      "model.ckpt-1523200.data-00001-of-00002\r\n",
      "model.ckpt-1523200.index\r\n",
      "model.ckpt-1523200.meta\r\n",
      "model.ckpt-1567200.data-00000-of-00002\r\n",
      "model.ckpt-1567200.data-00001-of-00002\r\n",
      "model.ckpt-1567200.index\r\n",
      "model.ckpt-1567200.meta\r\n",
      "model.ckpt-1569200.data-00000-of-00002\r\n",
      "model.ckpt-1569200.data-00001-of-00002\r\n",
      "model.ckpt-1569200.index\r\n",
      "model.ckpt-1569200.meta\r\n",
      "model.ckpt-1570200.data-00000-of-00002\r\n",
      "model.ckpt-1570200.data-00001-of-00002\r\n",
      "model.ckpt-1570200.index\r\n",
      "model.ckpt-1570200.meta\r\n",
      "model.ckpt-1571200.data-00000-of-00002\r\n",
      "model.ckpt-1571200.data-00001-of-00002\r\n",
      "model.ckpt-1571200.index\r\n",
      "model.ckpt-1571200.meta\r\n",
      "model.ckpt-1572200.data-00000-of-00002\r\n",
      "model.ckpt-1572200.data-00001-of-00002\r\n",
      "model.ckpt-1572200.index\r\n",
      "model.ckpt-1572200.meta\r\n",
      "model.ckpt-1573200.data-00000-of-00002\r\n",
      "model.ckpt-1573200.data-00001-of-00002\r\n",
      "model.ckpt-1573200.index\r\n",
      "model.ckpt-1573200.meta\r\n",
      "model.ckpt-819200.data-00000-of-00002\r\n",
      "model.ckpt-819200.data-00001-of-00002\r\n",
      "model.ckpt-819200.index\r\n",
      "model.ckpt-819200.meta\r\n",
      "model.ckpt-845200.data-00000-of-00002\r\n",
      "model.ckpt-845200.data-00001-of-00002\r\n",
      "model.ckpt-845200.index\r\n",
      "model.ckpt-845200.meta\r\n",
      "model.ckpt-870200.data-00000-of-00002\r\n",
      "model.ckpt-870200.data-00001-of-00002\r\n",
      "model.ckpt-870200.index\r\n",
      "model.ckpt-870200.meta\r\n",
      "model.ckpt-896200.data-00000-of-00002\r\n",
      "model.ckpt-896200.data-00001-of-00002\r\n",
      "model.ckpt-896200.index\r\n",
      "model.ckpt-896200.meta\r\n",
      "model.ckpt-921200.data-00000-of-00002\r\n",
      "model.ckpt-921200.data-00001-of-00002\r\n",
      "model.ckpt-921200.index\r\n",
      "model.ckpt-921200.meta\r\n",
      "model.ckpt-947200.data-00000-of-00002\r\n",
      "model.ckpt-947200.data-00001-of-00002\r\n",
      "model.ckpt-947200.index\r\n",
      "model.ckpt-947200.meta\r\n",
      "model.ckpt-972200.data-00000-of-00002\r\n",
      "model.ckpt-972200.data-00001-of-00002\r\n",
      "model.ckpt-972200.index\r\n",
      "model.ckpt-972200.meta\r\n",
      "model.ckpt-998200.data-00000-of-00002\r\n",
      "model.ckpt-998200.data-00001-of-00002\r\n",
      "model.ckpt-998200.index\r\n",
      "model.ckpt-998200.meta\r\n",
      "operative_config.gin\r\n"
     ]
    }
   ],
   "source": [
    "# Inspect the available TF checkpoint steps before picking one to convert.\n",
    "!ls t5-small-noisy-ms-en"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "f54394ad",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "T5Model(\n",
       "  (shared): Embedding(32128, 512)\n",
       "  (encoder): T5Stack(\n",
       "    (embed_tokens): Embedding(32128, 512)\n",
       "    (block): ModuleList(\n",
       "      (0): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (relative_attention_bias): Embedding(32, 8)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "      (1): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "      (2): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "      (3): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "      (4): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "      (5): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "    )\n",
       "    (final_layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "    (dropout): Dropout(p=0.1, inplace=False)\n",
       "  )\n",
       "  (decoder): T5Stack(\n",
       "    (embed_tokens): Embedding(32128, 512)\n",
       "    (block): ModuleList(\n",
       "      (0): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (relative_attention_bias): Embedding(32, 8)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerCrossAttention(\n",
       "            (EncDecAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (2): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "      (1): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerCrossAttention(\n",
       "            (EncDecAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (2): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "      (2): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerCrossAttention(\n",
       "            (EncDecAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (2): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "      (3): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerCrossAttention(\n",
       "            (EncDecAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (2): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "      (4): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerCrossAttention(\n",
       "            (EncDecAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (2): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "      (5): T5Block(\n",
       "        (layer): ModuleList(\n",
       "          (0): T5LayerSelfAttention(\n",
       "            (SelfAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (1): T5LayerCrossAttention(\n",
       "            (EncDecAttention): T5Attention(\n",
       "              (q): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (k): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (v): Linear(in_features=512, out_features=512, bias=False)\n",
       "              (o): Linear(in_features=512, out_features=512, bias=False)\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "          (2): T5LayerFF(\n",
       "            (DenseReluDense): T5DenseReluDense(\n",
       "              (wi): Linear(in_features=512, out_features=2048, bias=False)\n",
       "              (wo): Linear(in_features=2048, out_features=512, bias=False)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "              (relu_act): ReLU()\n",
       "            )\n",
       "            (layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "    )\n",
       "    (final_layer_norm): FusedRMSNorm(torch.Size([512]), eps=1e-06, elementwise_affine=True)\n",
       "    (dropout): Dropout(p=0.1, inplace=False)\n",
       "  )\n",
       ")"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Build a randomly-initialized PyTorch T5 and overwrite its weights from\n",
    "# the newest TF checkpoint listed above (step 1573200).\n",
    "model = T5Model(config)\n",
    "load_tf_weights_in_t5(model, config, 't5-small-noisy-ms-en/model.ckpt-1573200')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "86b9d748",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# SentencePiece vocabulary used during training.\n",
    "# NOTE(review): `padding` is not a documented T5Tokenizer constructor argument —\n",
    "# it is likely ignored here; padding is normally requested per __call__. Verify.\n",
    "tokenizer = T5Tokenizer('sp10m.cased.ms-en.model', padding = True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "7e68dcc6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Write the converted PyTorch weights + config into the output directory.\n",
    "model.save_pretrained(out)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "737e7c80",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Reload the saved weights into a generation-capable head for inference.\n",
    "model_gen = T5ForConditionalGeneration.from_pretrained(out)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "005b412a",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2022-07-11 18:59:49.091230: W tensorflow/python/util/util.cc:348] Sets are not currently considered sequences, but this may change in the future, so consider avoiding using them.\n",
      "Some weights of the PyTorch model were not used when initializing the TF 2.0 model TFT5ForConditionalGeneration: ['decoder.embed_tokens.weight', 'encoder.embed_tokens.weight']\n",
      "- This IS expected if you are initializing TFT5ForConditionalGeneration from a PyTorch model trained on another task or with another architecture (e.g. initializing a TFBertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing TFT5ForConditionalGeneration from a PyTorch model that you expect to be exactly identical (e.g. initializing a TFBertForSequenceClassification model from a BertForSequenceClassification model).\n",
      "All the weights of TFT5ForConditionalGeneration were initialized from the PyTorch model.\n",
      "If your task is similar to the task the model of the checkpoint was trained on, you can already use TFT5ForConditionalGeneration for predictions without further training.\n"
     ]
    }
   ],
   "source": [
    "# Build the TF2 counterpart from the same PyTorch checkpoint (for the TF Hub upload below).\n",
    "model_tf = TFT5ForConditionalGeneration.from_pretrained(out, from_pt = True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "fc1a66fe",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the parallel test set: `left` = Malay source, `right` = English reference.\n",
    "# splitlines() avoids the trailing empty string that split('\\n') produces when\n",
    "# the file ends with a newline (those empties are what the filter cell below drops).\n",
    "with open('ms-en-right.test') as fopen:\n",
    "    right = fopen.read().splitlines()\n",
    "\n",
    "with open('ms-en-left.test') as fopen:\n",
    "    left = fopen.read().splitlines()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "3124e42d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# GPU transfer intentionally disabled: CUDA_VISIBLE_DEVICES='' in the first cell forces CPU.\n",
    "# model_gen.cuda()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "35abd80e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# GPU variant of the evaluation loop (disabled: this run is CPU-only, see first cell).\n",
    "# from tqdm import tqdm\n",
    "\n",
    "# batch_size = 16\n",
    "\n",
    "# results = []\n",
    "# for i in tqdm(range(0, len(left), batch_size)):\n",
    "#     input_ids = [{'input_ids': tokenizer.encode(f'terjemah Melayu ke Inggeris: {s}', return_tensors = 'pt')[0]} for s in left[i:i + batch_size]]\n",
    "#     padded = tokenizer.pad(input_ids, padding = 'longest')\n",
    "#     outputs = model_gen.generate(padded['input_ids'].cuda(), attention_mask = padded['attention_mask'].cuda(), max_length = 1000)\n",
    "#     for o in outputs:\n",
    "#         results.append(tokenizer.decode(o, skip_special_tokens=True))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "9a28a3e4",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████████████████████████████████████████████████████████████████████████████████████████████| 2/2 [00:02<00:00,  1.07s/it]\n"
     ]
    }
   ],
   "source": [
    "from tqdm import tqdm\n",
    "\n",
    "batch_size = 16\n",
    "\n",
    "# Quick smoke test on a small subset of the test set.\n",
    "subset = left[:30]\n",
    "\n",
    "results = []\n",
    "for i in tqdm(range(0, len(subset), batch_size)):\n",
    "    # Slice the subset, not the full list: the original `left[i:i + batch_size]`\n",
    "    # read past the intended 30 sentences (the last batch was left[16:32],\n",
    "    # yielding 32 results).\n",
    "    batch = subset[i:i + batch_size]\n",
    "    input_ids = [{'input_ids': tokenizer.encode(f'terjemah Melayu ke Inggeris: {s}', return_tensors = 'pt')[0]} for s in batch]\n",
    "    padded = tokenizer.pad(input_ids, padding = 'longest')\n",
    "    outputs = model_gen.generate(**padded, max_length = 1000)\n",
    "    for o in outputs:\n",
    "        results.append(tokenizer.decode(o, skip_special_tokens=True))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "eb550d6f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# sacrebleu metric objects; CHRF and TER are imported but only BLEU is used below.\n",
    "from sacrebleu.metrics import BLEU, CHRF, TER\n",
    "\n",
    "bleu = BLEU()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "20530f31",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Drop empty generations, keeping hypothesis/reference pairs index-aligned.\n",
    "filtered_left = []\n",
    "filtered_right = []\n",
    "for idx in range(len(results)):\n",
    "    hyp = results[idx]\n",
    "    if len(hyp) > 0:\n",
    "        filtered_left.append(hyp)\n",
    "        filtered_right.append(right[idx])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "e50b3fbd",
   "metadata": {},
   "outputs": [],
   "source": [
    "# sacrebleu's corpus_score takes a list of reference streams (one per reference set).\n",
    "refs = [filtered_right]\n",
    "# NOTE(review): `sys` shadows the stdlib module name; consider renaming to e.g.\n",
    "# `hyps` if this notebook grows.\n",
    "sys = filtered_left"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "2ba0ce1c",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "The Penal Code (Malaysia) makes any sexual relationship between men and women under the age of 16 a rape whether or not it is committed by the woman's will.\n",
      "The Penal Code (Malaysia) makes any sexual contact between a man and a woman under the age of 16 rape whether it is done at the will of the woman.\n",
      "Kanun Keseksaan (Malaysia) menjadikan sebarang hubungan seksual antara lelaki dengan wanita di bawah umur 16 tahun sebagai rogol sama ada dilakukan dengan kerelaan wanita itu atau tidak.\n",
      "\n",
      "The \"waling-waling\" geese, also known as the \"Queen of the Philippines\" and also one of the national symbols of Malaysia, are also endemic in the region.\n",
      "The \"orchid\" orchid, also known as the \"Queen of the Philippines\" and also a national symbol of Malaysia, is also endemic in the region.\n",
      "Anggrek \"waling-waling\", yang juga dikenali sebagai \"Ratu Bunga Filipina\" dan juga salah satu simbol kebangsaan negara Malaysia, juga endemik dalam kawasan.\n",
      "\n",
      "God is All-knowing and All-wise.\n",
      "Allah is All-Knowing, All-Wise.\n",
      "Dan (ingatlah) Allah Maha Mengetahui, lagi Maha Bijaksana.\n",
      "\n",
      "Sebelimbingan, Kota Bangun, Kutai Kartanegara Sebelimbingan is a village in the district of Kota Bangun, Regency of Kutai Kartanegara, East Kalimantan, Indonesia.\n",
      "Neighborhood, City of Buildings, City of Kartanegara Neighborhood is a village in the village of Bangun, Regency of Kutai Kartanegara, East Kalimantan, Indonesia.\n",
      "Sebelimbingan, Kota Bangun, Kutai Kartanegara Sebelimbingan merupakan sebuah desa di kecamatan Kota Bangun, Kabupaten Kutai Kartanegara, Kalimantan Timur, Indonesia.\n",
      "\n",
      "According to the Indian census in 2001 Allapuram had an education rate (reading ability) of 76%, above the national average of 59.5%; with 53% of men and 47% of women able to read.\n",
      "According to the Indian census in 2001 Allapuram had an education rate (reading ability) of 76%, above the national average of 59.5%; with 53% of men and 47% of women able to read.\n",
      "Menurut bancian India pada tahun 2001 Allapuram memiliki kadar pendidikan (kebolehan membaca) 76%, melebihi kadar purata kebangsaan 59.5%; dengan 53% lelaki dan 47% wanita mampu membaca.\n",
      "\n",
      "He was sponsored by Li-Ning.\n",
      "He was sponsored by Li-Ning.\n",
      "Beliau telah ditaja oleh Li-Ning.\n",
      "\n",
      "The 2002 Federal Audit Report states that the project was approved for privatization through land exchange by the government in 1996 commenced its physical work by a two-corporate consortium in May 2000.\n",
      "The Federal Audit Report 2002 states that the project was approved privatization through a land acquisition by the government in 1996 and began physical work by a consortium of two companies in May 2000.\n",
      "Laporan Audit Persekutuan 2002 menyatakan projek ini diluluskan penswastaannya melalui pertukaran tanah oleh kerajaan pada 1996 itu dimulakan kerja fizikalnya oleh konsortium dua syarikat pada Mei 2000.\n",
      "\n",
      "Tawau MRSM was initially expected to start operating in February 2008 but it began operating after students enrolled in March 2008.\n",
      "Tawau MRSM was initially expected to start operations in February 2008 but began operating after students enrolled in March 2008.\n",
      "MRSM Tawau pada awalnya dijangka untuk mula beroperasi dalam Februari 2008 namun ia mula beroperasi selepas pelajar mendaftar pada bulan Mac 2008.\n",
      "\n",
      "Lost (Album Soul 3) Lost is Soul 3's first studio album.\n",
      "Lost (Soul 3 Album) Lost is the first studio album from Soul 3.\n",
      "Hilang (Album Soul 3) Hilang adalah album studio yang pertama dari Soul 3.\n",
      "\n",
      "May readers of this room be cheerful with their loved ones.\n",
      "Hope this readers of the room are happy next to their loved ones.\n",
      "Moga pembaca ruangan ini ceria di samping keluarga tersayang.\n",
      "\n",
      "\"As you have noticed over the past few days, I have been accused of causing racial disputes in the country and the denominationrs used sentences out of context and added them to strange fabrications,\" he said.\n",
      "\"As you have noticed over the last few days, I have been accused of causing racial disputes in this country and the defendants to use sentences outside the context and supplement them with strange fabrications within them,\" he said.\n",
      "\"Seperti yang Anda perhatikan selama beberapa hari terakhir, saya dituduh menyebabkan perselisihan rasial di negara ini dan para pencela menggunakan kalimat di luar konteks dan menambahkannya dengan pemalsuan yang aneh di dalamnya,\" ujarnya.\n",
      "\n",
      "We really lure people to come with Instagramable angles on this area.\n",
      "We do indeed attract people with the Instagramable corners of this area.\n",
      "Kita memang memancing orang datang dengan sudut-sudut yang Instagramable di areal ini.\n",
      "\n",
      "Brooke's era saw the arrival of Christian missionaries, bringing modern education and medicine to Sarawak.\n",
      "Brooke's era saw the arrival of Christian missionaries, bringing modern education and medicine to Sarawak.\n",
      "Brooke era menyaksikan ketibaan Kristian mubaligh, membawa pendidikan dan perubatan moden ke Sarawak.\n",
      "\n",
      "\"Sarawak\" is the largest state in Malaysia.\n",
      "\"Sarawak\" is the largest state in Malaysia.\n",
      "\"Sarawak\" merupakan negeri yang terbesar di Malaysia.\n",
      "\n",
      "Juillan Juillan is a commune in the Hautes-Pyrenees department in southwestern France.\n",
      "Juillan Juillan is a commune in the Hautes-Pyrenees department in southwestern France.\n",
      "Juillan Juillan ialah komun di jabatan Hautes-Pyrenees di barat-daya Perancis.\n",
      "\n",
      "Manchester City owners are interested in buying clubs in Malaysia\n",
      "Manchester City owner is interested in buying a club in Malaysia\n",
      "Pemilik Manchester City minat beli kelab di Malaysia\n",
      "\n",
      "\"To those who do not know the tip of the base and continue to punish without looking at the facts, I thank you too.\n",
      "\"To those who do not know the basics and continue to punish without looking at the facts, I would also like to thank you.\n",
      "\"Kepada mereka yang tidak tahu hujung pangkal dan terus menghukum tanpa melihat fakta sebenar, saya ucapkan terima kasih juga.\n",
      "\n",
      "He is fluent in German and Italian.\n",
      "He is fluent in German and Italian.\n",
      "Beliau fasih dalam bahasa Jerman dan Itali.\n",
      "\n",
      "These cakes can be served in many ways, either with a relatively young coconut grated (\"live skin\"), which is an old coconut, or with a spicy gravy (Terengganu).\n",
      "These apple cakes can be served in a variety of ways, either with relatively young coconut grit (\"live skin\"), which is coconut that is not old enough, or with spicy gravy (Terengganu).\n",
      "Kuih apam ini boleh dihidangkan dengan pelbagai cara, samaada dengan parutan kelapa yang agak muda (\"nyiur kulit hidup\"), iaitu kelapa yang masih belum cukup tua, atau dengan kuah berempah (Terengganu).\n",
      "\n",
      "Meanwhile, he praised the efforts of the AEON Group, which became the first retail network to launch e-wallets (electronic wallets) in line with the government's desire to digitally pay.\n",
      "Meanwhile, he praised the efforts of the AEON Group to become the first retailer to launch e-wallets (e-wallets) in line with the government's aspiration towards digital payments.\n",
      "Sementara itu, beliau memuji usaha Kumpulan AEON yang menjadi rangkaian runcit pertama melancarkan e-wallet (dompet elektronik) selaras dengan hasrat kerajaan menuju ke arah pembayaran secara digital.\n",
      "\n",
      "Refusing to accept the General Election holidays, private companies are being investigated\n",
      "Refusing to accept the coronation, the private company is under investigation\n",
      "Enggan iktiraf cuti umum Pertabalan Agong, syarikat swasta disiasat\n",
      "\n",
      "Askul Darreh Askul Darreh is an area located in Iran.\n",
      "Askul Darreh Askul Darreh is an area located in Iran.\n",
      "Askul Darreh Askul Darreh merupakan sebuah kawasan yang terletak di Iran.\n",
      "\n",
      "Meine Tante, deine Tante (1927 film) Meine Tante - deine Tante (English: My Aunt, Your Aunt) is a 1927 German comedy film directed by Carl Froelich starring Ralph Arthur Roberts, Angelo Ferrari and Henny Porten.\n",
      "Meine Tante, deine Tante (film 1927) Meine Tante - deine Tante (English: My Aunt, Your Aunt) is a 1927 German comedy film starring Carl Froelich starring Ralph Arthur Roberts, Angelo Ferrari and Henny Porten.\n",
      "Meine Tante, deine Tante (filem 1927) Meine Tante - deine Tante (Inggeris: My Aunt, Your Aunt) merupakan filem senyap komedi Jerman 1927 arahan Carl Froelich yang dibintangi Ralph Arthur Roberts, Angelo Ferrari dan Henny Porten.\n",
      "\n",
      "The parliamentary constituencies and the state legislature were gazetted following the 2003 constituency re-election process.\n",
      "The parliamentary constituencies and the state legislature were gazetted after the re-election process of the 2003 constituencies.\n",
      "Kawasan Parlimen dan Dewan Undangan Negeri ini telah diwartakan setelah proses persempadanan semula kawasan pilihan raya 2003.\n",
      "\n",
      "The English dialectal word \"culver\" appears to derive from the Latin \"columba\".\n",
      "The English dialectal word \"culver\" appears to derive from Latin \"columba\".\n",
      "The English dialectal word \"culver\" appears to derive from Latin \"columba\".\n",
      "\n",
      "Bryon Flitsch of MTV Buzzworthy said 'Popular Song' was a different view of Mika's previous attempt, stating that \"a passionate pace + interesting melodies + likes to play lyrics = perfect song bounce around for.\n",
      "Bryon Flitsch of MTV Buzzworthy said 'Popular Song' was a different view of Mika's earlier efforts, stating that \"soulful rhythm + catchy melody + playful lyrics = perfect song bounces around for.\n",
      "Bryon Flitsch daripada MTV Buzzworthy berkata' Popular Song 'adalah pandangan yang berbeza daripada usaha sebelumnya Mika itu, yang menyatakan bahawa \"rentak yang penuh semangat + melodi menarik + suka bermain lirik-lirik = lagu sempurna melantun sekitar untuk.\n",
      "\n",
      "The 9.3, 4.45 and 3.23 sq km islands will be redeemed in the Permatang Damar Sea waters, near Bayan Lepas.\n",
      "The 9.3, 4.45 and 3.23km square islands will be redeemed in the waters of Permatang Damar Sea, near Bayan Lepas.\n",
      "Pulau-pulau seluas 9.3, 4.45 dan 3.23km persegi akan ditebus guna di perairan Permatang Damar Laut, berdekatan Bayan Lepas.\n",
      "\n",
      "Jijoe soon withdrew and was replaced by Reza.\n",
      "Jijoe soon resigned and was replaced by Reza.\n",
      "Tidak lama kemudian Jijoe telah menarik diri dan telah digantikan oleh Reza.\n",
      "\n",
      "As of 2016, SKDOJ has 740 students, 47 teachers and 6 school staff.\n",
      "As of 2016, SKDOJ has 740 students, 47 teachers and 6 school staff.\n",
      "Setakat tahun 2016, SKDOJ mempunyai 740 murid, 47 guru dan 6 kakitangan sekolah.\n",
      "\n",
      "Akrab acts as a complement to co-operative linkages that will enable the cooperative to master the entire effective distribution chain especially in an effort to reduce prices through bulk purchases and efficient distribution facilities. \"\n",
      "Familiarity serves as a complement to co-operative linkages that will enable the cooperatives to dominate the entire effective distribution chain, especially in an effort to reduce commodity prices through bulk purchases and efficient distribution facilities \".\n",
      "Akrab berperanan sebagai pelengkap dalam co-operative linkages yang akan membolehkan koperasi menguasai keseluruhan rantaian pengedaran yang efektif khususnya dalam usaha mengurangkan harga barangan melalui pembelian secara pukal dan kemudahan pengedaran yang efisien\".\n",
      "\n",
      "False Dmitriy I and his Polish garrison were overthrown, and a boyar, Vasily Shuysky, was proclaimed tsar in 1606.\n",
      "False Dmitriy I and his Polish garrison were overthrown, and a boyar, Vasily Shuysky, was proclaimed tsar in 1606.\n",
      "False Dmitriy I and his Polish garrison were overthrown, and a boyar, Vasily Shuysky, was proclaimed tsar in 1606.\n",
      "\n",
      "Every year, the Budget always prioritizes the Special Fund for School Improvement and Maintenance.\n",
      "Every year, the Budget always prioritizes the Special Fund for School Improvement and Maintenance.\n",
      "Saban tahun, Bajet ini sentiasa memberi keutamaan kepada Tabung Khas Penambahbaikan dan Penyelenggaraan Sekolah.\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Print hypothesis / reference / Malay source triples for manual inspection.\n",
    "# NOTE(review): `left[i]` is only aligned because nothing was filtered out in\n",
    "# this run; if an empty generation were dropped, indices into `left` would shift.\n",
    "for i in range(len(filtered_left)):\n",
    "    print(filtered_left[i])\n",
    "    print(filtered_right[i])\n",
    "    print(left[i])\n",
    "    print()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "afa050d8",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['The Penal Code (Malaysia) makes any sexual contact between a man and a woman under the age of 16 rape whether it is done at the will of the woman.',\n",
       " 'The \"orchid\" orchid, also known as the \"Queen of the Philippines\" and also a national symbol of Malaysia, is also endemic in the region.',\n",
       " 'Allah is All-Knowing, All-Wise.',\n",
       " 'Neighborhood, City of Buildings, City of Kartanegara Neighborhood is a village in the village of Bangun, Regency of Kutai Kartanegara, East Kalimantan, Indonesia.',\n",
       " 'According to the Indian census in 2001 Allapuram had an education rate (reading ability) of 76%, above the national average of 59.5%; with 53% of men and 47% of women able to read.',\n",
       " 'He was sponsored by Li-Ning.',\n",
       " 'The Federal Audit Report 2002 states that the project was approved privatization through a land acquisition by the government in 1996 and began physical work by a consortium of two companies in May 2000.',\n",
       " 'Tawau MRSM was initially expected to start operations in February 2008 but began operating after students enrolled in March 2008.',\n",
       " 'Lost (Soul 3 Album) Lost is the first studio album from Soul 3.',\n",
       " 'Hope this readers of the room are happy next to their loved ones.',\n",
       " '\"As you have noticed over the last few days, I have been accused of causing racial disputes in this country and the defendants to use sentences outside the context and supplement them with strange fabrications within them,\" he said.',\n",
       " 'We do indeed attract people with the Instagramable corners of this area.',\n",
       " \"Brooke's era saw the arrival of Christian missionaries, bringing modern education and medicine to Sarawak.\",\n",
       " '\"Sarawak\" is the largest state in Malaysia.',\n",
       " 'Juillan Juillan is a commune in the Hautes-Pyrenees department in southwestern France.',\n",
       " 'Manchester City owner is interested in buying a club in Malaysia',\n",
       " '\"To those who do not know the basics and continue to punish without looking at the facts, I would also like to thank you.',\n",
       " 'He is fluent in German and Italian.',\n",
       " 'These apple cakes can be served in a variety of ways, either with relatively young coconut grit (\"live skin\"), which is coconut that is not old enough, or with spicy gravy (Terengganu).',\n",
       " \"Meanwhile, he praised the efforts of the AEON Group to become the first retailer to launch e-wallets (e-wallets) in line with the government's aspiration towards digital payments.\",\n",
       " 'Refusing to accept the coronation, the private company is under investigation',\n",
       " 'Askul Darreh Askul Darreh is an area located in Iran.',\n",
       " 'Meine Tante, deine Tante (film 1927) Meine Tante - deine Tante (English: My Aunt, Your Aunt) is a 1927 German comedy film starring Carl Froelich starring Ralph Arthur Roberts, Angelo Ferrari and Henny Porten.',\n",
       " 'The parliamentary constituencies and the state legislature were gazetted after the re-election process of the 2003 constituencies.',\n",
       " 'The English dialectal word \"culver\" appears to derive from Latin \"columba\".',\n",
       " 'Bryon Flitsch of MTV Buzzworthy said \\'Popular Song\\' was a different view of Mika\\'s earlier efforts, stating that \"soulful rhythm + catchy melody + playful lyrics = perfect song bounces around for.',\n",
       " 'The 9.3, 4.45 and 3.23km square islands will be redeemed in the waters of Permatang Damar Sea, near Bayan Lepas.',\n",
       " 'Jijoe soon resigned and was replaced by Reza.',\n",
       " 'As of 2016, SKDOJ has 740 students, 47 teachers and 6 school staff.',\n",
       " 'Familiarity serves as a complement to co-operative linkages that will enable the cooperatives to dominate the entire effective distribution chain, especially in an effort to reduce commodity prices through bulk purchases and efficient distribution facilities \".',\n",
       " 'False Dmitriy I and his Polish garrison were overthrown, and a boyar, Vasily Shuysky, was proclaimed tsar in 1606.',\n",
       " 'Every year, the Budget always prioritizes the Special Fund for School Improvement and Maintenance.']"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# English references after filtering (rich display).\n",
    "filtered_right"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "ecc03af7",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[\"The Penal Code (Malaysia) makes any sexual relationship between men and women under the age of 16 a rape whether or not it is committed to the woman's will.\",\n",
       " 'The \"waling-waling\" quake, also known as the \"Queen of the Philippines\" and also one of the national symbols of Malaysia, is also endemic in the area.',\n",
       " 'God is All-knowing, All-wise.',\n",
       " 'Sebelimbingan, Kota Bangun, Kutai Kartanegara Sebelimbingan is a village in the district of Kota Bangun, Regency of Kutai Kartanegara, East Kalimantan, Indonesia.',\n",
       " 'According to the Indian census in 2001 Allapuram had an education rate (reading ability) of 76%, above the national average of 59.5%; with 53% of men and 47% of women able to read.',\n",
       " 'He was sponsored by Li-Ning.',\n",
       " 'The 2002 Federal Audit Report states that the project was approved by privatization through land exchange by the government in 1996 that began its physical work by a two-corporate consortium in May 2000.',\n",
       " 'Tawau MRSM was initially expected to start operating in February 2008 but it began operating after students enrolled in March 2008.',\n",
       " \"Lost (Life Soul 3 Album) Lost is Soul 3's first studio album.\",\n",
       " 'May readers of this room be cheerful with their loved ones.',\n",
       " '\"As you have noticed over the past few days, I have been accused of causing racial disputes in the country and the denominations used sentences out of context and added them to strange counterfeits in it,\" he said.',\n",
       " 'We really lure people to come with Instagramable angles on this area.',\n",
       " 'Brooke era saw the arrival of Christian missionaries, bringing modern education and medicine to Sarawak.',\n",
       " '\"Sarawak\" is the largest state in Malaysia.',\n",
       " 'Juillan Juillan is a commune in the Hautes-Pyrenees department in southwestern France.',\n",
       " 'Manchester City owners are interested in buying clubs in Malaysia',\n",
       " '\"To those who do not know the tip of the base and continue to punish without looking at the facts, I thank you too.',\n",
       " 'He is fluent in German and Italian.',\n",
       " 'This cake can be served in many ways, either with a relatively young coconut grated (\"live skin\"), which is an old coconut, or with a spicy gravy (Terengganu).',\n",
       " \"Meanwhile, he praised the efforts of the AEON Group, which became the first retail network to launch e-wallets (electronic wallets) in line with the government's desire to digitally pay.\",\n",
       " 'Refusing to recognize the General Election holidays, private companies are being investigated',\n",
       " 'Askul Darreh Askul Darreh is an area located in Iran.',\n",
       " 'Meine Tante, deine Tante (1927 film) Meine Tante - deine Tante (English: My Aunt, Your Aunt) is a 1927 German comedy film directed by Carl Froelich starring Ralph Arthur Roberts, Angelo Ferrari and Henny Porten.',\n",
       " 'The parliamentary constituencies and the state legislature were gazetted following the 2003 electoral redistricting process.',\n",
       " 'The English dialectal word \"culver\" appears to derive from Latin \"columba\".',\n",
       " 'Bryon Flitsch of MTV Buzzworthy said \\'Popular Song\\' was a different view of Mika\\'s previous attempt, stating that \"a passionate throw + interesting melodic + play lyrics = perfect song bounce around for.',\n",
       " 'The 9.3, 4.45 and 3.23 sq km islands will be redeemed in the Permatang Damar Sea waters, near Bayan Lepas.',\n",
       " 'Jijoe soon withdrew and was replaced by Reza.',\n",
       " 'As of 2016, SKDOJ has 740 students, 47 teachers and 6 school staff.',\n",
       " 'Akrab serves as a complement to co-operative linkages that will enable the cooperative to master the entire effective distribution chain especially in an effort to reduce prices of goods through bulk purchases and efficient distribution facilities. \"',\n",
       " 'False Dmitriy I and his Polish garrison were overthrown, and a boyar, Vasily Shuysky, was proclaimed tsar in 1606.',\n",
       " 'Every year, the Budget always prioritizes the Special Fund for School Improvement and Maintenance.']"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Model hypotheses after filtering (rich display).\n",
    "filtered_left"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "e5d6f81d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Corpus-level BLEU of the hypotheses against the single reference stream.\n",
    "r = bleu.corpus_score(sys, refs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "cf03103f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'name': 'BLEU',\n",
       " 'score': 65.52581746285784,\n",
       " '_mean': -1.0,\n",
       " '_ci': -1.0,\n",
       " '_verbose': '85.3/70.0/60.8/53.2 (BP = 0.989 ratio = 0.989 hyp_len = 701 ref_len = 709)',\n",
       " 'bp': 0.9886526047274109,\n",
       " 'counts': [598, 468, 387, 322],\n",
       " 'totals': [701, 669, 637, 605],\n",
       " 'sys_len': 701,\n",
       " 'ref_len': 709,\n",
       " 'precisions': [85.30670470756063,\n",
       "  69.95515695067265,\n",
       "  60.75353218210361,\n",
       "  53.22314049586777],\n",
       " 'prec_str': '85.3/70.0/60.8/53.2',\n",
       " 'ratio': 0.9887165021156559}"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Inspect every field of the BLEU result (score, precisions, brevity penalty, ...).\n",
    "vars(r)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "88704b13",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Upload the tokenizer files to the Hugging Face Hub repo.\n",
    "tokenizer.push_to_hub('t5-small-finetuned-noisy-ms-en', organization='mesolitica')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "97afc0d5",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "bb20ca8fbf874030972eaaf520b70614",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "Upload file pytorch_model.bin:   0%|          | 4.00k/231M [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "To https://huggingface.co/mesolitica/t5-small-finetuned-noisy-ms-en\n",
      "   b9391eb..df949d6  main -> main\n",
      "\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "'https://huggingface.co/mesolitica/t5-small-finetuned-noisy-ms-en/commit/df949d638df32299f4ba271fabe0352b187e2fcf'"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Upload the PyTorch weights to the same Hub repo.\n",
    "model_gen.push_to_hub('t5-small-finetuned-noisy-ms-en', organization='mesolitica')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "db817c07",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "ae27fe631bab4372b55bd88c4f37f007",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "Upload file tf_model.h5:   0%|          | 4.00k/231M [00:00<?, ?B/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "To https://huggingface.co/mesolitica/t5-small-finetuned-noisy-ms-en\n",
      "   df949d6..cd4e0aa  main -> main\n",
      "\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "'https://huggingface.co/mesolitica/t5-small-finetuned-noisy-ms-en/commit/cd4e0aa002a21a737839abed3887a567203b687e'"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Upload the TF2 weights as well, so the repo serves both frameworks.\n",
    "model_tf.push_to_hub('t5-small-finetuned-noisy-ms-en', organization='mesolitica')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "2819f991",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Already up to date.\r\n"
     ]
    }
   ],
   "source": [
    "# Sync the local clone with the Hub pushes made above before committing extras.\n",
    "!cd t5-small-finetuned-noisy-ms-en && git pull"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "df9b8117",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[main d0a4b4b] add tensorboard\n",
      " 2 files changed, 5 insertions(+), 2 deletions(-)\n",
      " create mode 100644 events.out.tfevents.1657527614.huseincomel-desktop\n",
      "Uploading LFS objects: 100% (2/2), 114 MB | 4.3 MB/s, done.                     \n",
      "Enumerating objects: 6, done.\n",
      "Counting objects: 100% (6/6), done.\n",
      "Delta compression using up to 16 threads\n",
      "Compressing objects: 100% (4/4), done.\n",
      "Writing objects: 100% (4/4), 552 bytes | 552.00 KiB/s, done.\n",
      "Total 4 (delta 1), reused 0 (delta 0)\n",
      "To https://huggingface.co/mesolitica/t5-small-finetuned-noisy-ms-en\n",
      "   cd4e0aa..d0a4b4b  main -> main\n"
     ]
    }
   ],
   "source": [
    "# Copy TensorBoard event files into the repo clone and push them (uploaded via git-lfs,\n",
    "# see the 'Uploading LFS objects' line in the output).\n",
    "!cp t5-small-noisy-ms-en/*.tfevents.* t5-small-finetuned-noisy-ms-en\n",
    "!cd t5-small-finetuned-noisy-ms-en && git add . && git commit -m 'add tensorboard' && git push"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.10"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
