{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "199c565b-7da9-43b9-a92e-a7de232caf22",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/david/anaconda3/envs/peft/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
      "  from .autonotebook import tqdm as notebook_tqdm\n"
     ]
    }
   ],
   "source": [
    "from awq import AutoAWQForCausalLM\n",
    "from transformers import AutoTokenizer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "75bd7575-712b-4c5b-9a5a-b9cc80c73189",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Source model to quantize and the output directory for the AWQ artifacts.\n",
     "model_name_or_path = \"facebook/opt-6.7b\"\n",
     "quan_model_dir = \"../models/opt-6.7b-awq\"\n",
     "\n",
     "# AWQ settings: 4-bit weights, quantization groups of 128 channels,\n",
     "# zero-point (asymmetric) quantization, GEMM kernel layout.\n",
     "quant_config = {\"zero_point\": True, \"q_group_size\": 128, \"w_bit\": 4, \"version\": \"GEMM\" }"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "174dbd8f-8670-4cde-a7c0-7d0bf2674bd8",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Fetching 12 files: 100%|███████████████████████████████████████████████████████████████████████████| 12/12 [00:00<00:00, 41323.19it/s]\n",
      "Loading checkpoint shards: 100%|████████████████████████████████████████████████████████████████████████| 2/2 [00:05<00:00,  2.52s/it]\n"
     ]
    }
   ],
   "source": [
     "# Load the full-precision base model and its tokenizer from the Hub.\n",
     "# safetensors=False: this checkpoint ships PyTorch .bin shards (see the\n",
     "# 'Loading checkpoint shards' progress above).\n",
     "model = AutoAWQForCausalLM.from_pretrained(model_name_or_path,device_map=\"auto\", safetensors=False)\n",
     "tokenizer = AutoTokenizer.from_pretrained(model_name_or_path,trust_remote_code=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "id": "65a9c867-8b4c-438d-a39b-429590275827",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Repo card metadata block was not found. Setting CardData to empty.\n",
      "WARNING:huggingface_hub.repocard:Repo card metadata block was not found. Setting CardData to empty.\n",
      "AWQ: 100%|████████████████████████████████████████████████████████████████████████████████████████████| 32/32 [42:27<00:00, 79.62s/it]\n"
     ]
    }
   ],
   "source": [
     "# Run AWQ calibration + weight quantization. Expensive: the progress bar\n",
     "# above shows ~42 min for the 32 decoder layers on this machine.\n",
     "model.quantize(tokenizer, quant_config=quant_config)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "id": "38268e5f-6bde-4b78-a8cd-af8df9e04f2d",
   "metadata": {},
   "outputs": [],
   "source": [
    "from transformers import AwqConfig, AutoConfig"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "7a0772a6-649a-43c4-8069-83c554677fb7",
   "metadata": {},
   "outputs": [],
   "source": [
    "quantization_config = AwqConfig(\n",
    "    bits=quant_config[\"w_bit\"],\n",
    "    group_size=quant_config[\"q_group_size\"],\n",
    "    zero_point=True,\n",
    "    version=quant_config[\"version\"].lower(),\n",
    ").to_dict()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "id": "47dc6e51-e95d-4118-aa79-33d2a98dba9b",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Attach the transformers-format quantization config to the underlying HF\n",
     "# model config so it is written out with the checkpoint on save.\n",
     "model.model.config.quantization_config = quantization_config"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "id": "d6b19e60-e809-4319-9802-4c5a8885bd13",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "('../models/opt-6.7b-awq/tokenizer_config.json',\n",
       " '../models/opt-6.7b-awq/special_tokens_map.json',\n",
       " '../models/opt-6.7b-awq/vocab.json',\n",
       " '../models/opt-6.7b-awq/merges.txt',\n",
       " '../models/opt-6.7b-awq/added_tokens.json',\n",
       " '../models/opt-6.7b-awq/tokenizer.json')"
      ]
     },
     "execution_count": 34,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Persist the quantized weights and the tokenizer files to quan_model_dir.\n",
     "model.save_quantized(quan_model_dir)\n",
     "tokenizer.save_pretrained(quan_model_dir)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "id": "ea677a77-b5e4-45be-b41b-766f641d09ce",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "OptAWQForCausalLM(\n",
       "  (model): OPTForCausalLM(\n",
       "    (model): OPTModel(\n",
       "      (decoder): OPTDecoder(\n",
       "        (embed_tokens): Embedding(50272, 4096, padding_idx=1)\n",
       "        (embed_positions): OPTLearnedPositionalEmbedding(2050, 4096)\n",
       "        (final_layer_norm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n",
       "        (layers): ModuleList(\n",
       "          (0-31): 32 x OPTDecoderLayer(\n",
       "            (self_attn): OPTSdpaAttention(\n",
       "              (k_proj): WQLinear_GEMM(in_features=4096, out_features=4096, bias=True, w_bit=4, group_size=128)\n",
       "              (v_proj): WQLinear_GEMM(in_features=4096, out_features=4096, bias=True, w_bit=4, group_size=128)\n",
       "              (q_proj): WQLinear_GEMM(in_features=4096, out_features=4096, bias=True, w_bit=4, group_size=128)\n",
       "              (out_proj): WQLinear_GEMM(in_features=4096, out_features=4096, bias=True, w_bit=4, group_size=128)\n",
       "            )\n",
       "            (activation_fn): ReLU()\n",
       "            (self_attn_layer_norm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n",
       "            (fc1): WQLinear_GEMM(in_features=4096, out_features=16384, bias=True, w_bit=4, group_size=128)\n",
       "            (fc2): WQLinear_GEMM(in_features=16384, out_features=4096, bias=True, w_bit=4, group_size=128)\n",
       "            (final_layer_norm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "    )\n",
       "    (lm_head): Linear(in_features=4096, out_features=50272, bias=False)\n",
       "  )\n",
       ")"
      ]
     },
     "execution_count": 35,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Switch to inference mode; the returned module tree also confirms the\n",
     "# Linear layers were replaced by WQLinear_GEMM (w_bit=4, group_size=128).\n",
     "model.eval()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "id": "480fe6ea-9bf8-452a-85cd-62f32854f816",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Reload the saved checkpoint through plain transformers to verify it works\n",
     "# standalone, without the autoawq wrapper. NOTE: this rebinds `model` and\n",
     "# `tokenizer`, shadowing the objects used during quantization above.\n",
     "from transformers import AutoTokenizer, AutoModelForCausalLM\n",
     "\n",
     "tokenizer = AutoTokenizer.from_pretrained(quan_model_dir)\n",
     "model = AutoModelForCausalLM.from_pretrained(quan_model_dir,device_map=\"cuda\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "id": "52cc065b-01cd-46b7-a0ab-22d174d1cf20",
   "metadata": {},
   "outputs": [],
   "source": [
    "def generate_text(text):\n",
    "    inputs = tokenizer(text, return_tensors=\"pt\").to(\"cuda\")\n",
    "\n",
    "    out = model.generate(**inputs, max_new_tokens=64)\n",
    "    return tokenizer.decode(out[0], skip_special_tokens=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "id": "cc9b1f11-b58e-49d5-a203-751e1da28dd1",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Merry Christmas! I'm glad to see I'm not the only weirdo that leaves their tree up all year around. I've got four lights on top the tree...\n",
      "I leave mine up for my guests and only turn off the lights when I go to bed.  The tree stays, they can enjoy it with a glass of wine or a bag\n"
     ]
    }
   ],
   "source": [
     "# Quick generation sanity check with the reloaded quantized model.\n",
     "result = generate_text(\"Merry Christmas! I'm glad to\")\n",
     "print(result)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "id": "331046e8-a689-4f65-a3a7-1a95b2d75930",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "The woman worked as a flight attendant on both routes for 15 to 20 years.\n",
      "\n",
      "A senior woman cabin crew member of Air India died of cardiac arrest Saturday morning at the Delhi airport.\n",
      "\n",
      "Air India spokesperson told news agency ANI that the woman, who worked as a Flight Attendant for 15 to 20 years on both routes, was\n"
     ]
    }
   ],
   "source": [
     "# Generation check: open-ended occupation continuation prompt.\n",
     "result = generate_text(\"The woman worked as a\")\n",
     "print(result)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "id": "b1cbc991-154f-4fa8-830e-48c78a512144",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "The girl worked as a doctor in the public hospital. She reportedly attended to a senior government officer who was suffering from cough and fever.\n",
      "\n",
      "The two were in love, but decided against marriage as the girl was working in the public domain due to government job, the complaint added.\n",
      "\n",
      "The accused were arrested under the Maharashtra Prohibition of R\n"
     ]
    }
   ],
   "source": [
     "# Generation check: companion prompt to the previous cell.\n",
     "result = generate_text(\"The girl worked as a\")\n",
     "print(result)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "id": "7656e682-5974-4d37-abe6-85b9e1c91517",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "My name is David, my wife'name is Michelle, and I'm looking forward to some really wonderful, very fun, and very loving moments with you, when your heart tells you it is time.\n",
      "Thanks for this. Made me tear up a bit.\n"
     ]
    }
   ],
   "source": [
     "# Generation check: name-completion prompt (the \"wife'name\" typo is part\n",
     "# of the input string, left as-is).\n",
     "result = generate_text(\"My name is David, my wife'name is\")\n",
     "print(result)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "id": "965bab4f-6fc1-440e-97bb-64d58d444849",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "My name is Huang Weiwei, my friend's name is Fei Peng, his wife's name is A Shan, and Fei Peng's wife's name is A Shan\n",
      "I remember in Civ 5 they would say \"my great grandmother is A Shan\" or something along those lines\n",
      "Haha that's great, I love Civ V\n"
     ]
    }
   ],
   "source": [
     "# Generation check: multi-entity name-tracking prompt.\n",
     "result = generate_text(\"My name is Huang Weiwei, my friend's name is Fei Peng, his wife's name is A Shan, and Fei Peng's wife's name is\")\n",
     "print(result)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3ed36ea7-1a36-4065-b2f2-203f772327f4",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.16"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
