{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "436b3551",
   "metadata": {},
   "outputs": [],
   "source": [
    "from ultralytics import YOLO\n",
    "import cv2\n",
    "import numpy as np\n",
    "import os\n",
    "\n",
    "def detection_gauge_face(img, img_path, model_path, box_index=-1):\n",
    "    \"\"\"Run YOLO gauge-face detection on an image and return one bounding box.\n",
    "\n",
    "    :param img: image array (as read by cv2) to run detection on\n",
    "    :param img_path: path of the image, used only for the error message\n",
    "    :param model_path: path to the YOLO weights file\n",
    "    :param box_index: index of the detected box to return; a negative value selects the first box\n",
    "    :return: tuple of (selected box as int tensor [x1, y1, x2, y2], all detected boxes)\n",
    "    :raises AssertionError: if no bounding box was detected\n",
    "    \"\"\"\n",
    "    # NOTE: the model is reloaded on every call; cache it if processing many images\n",
    "    model = YOLO(model_path)  # load model\n",
    "\n",
    "    results = model(img)  # run inference\n",
    "\n",
    "    boxes = results[0].boxes\n",
    "\n",
    "    # fail loudly with the offending path instead of an opaque IndexError below\n",
    "    assert len(boxes) > 0, f\"no bbox detected for image {img_path}\"\n",
    "\n",
    "    if box_index >= 0:\n",
    "        m_box = boxes[box_index]\n",
    "    else:\n",
    "        m_box = boxes[0]\n",
    "    return m_box.xyxy[0].int(), boxes\n",
    "\n",
    "\n",
    "def crop_image(img, box):\n",
    "    \"\"\"Crop an image to a bounding box and pad it to a square.\n",
    "\n",
    "    :param img: image array in [y, x, channel] layout (as read by cv2)\n",
    "    :param box: bounding box [x1, y1, x2, y2]\n",
    "    :return: square crop, padded with black on the short side\n",
    "    \"\"\"\n",
    "    # copy only the crop instead of the whole source image\n",
    "    cropped_img = img[box[1]:box[3],\n",
    "                      box[0]:box[2], :].copy()  # image has format [y, x, rgb]\n",
    "\n",
    "    height = int(box[3] - box[1])\n",
    "    width = int(box[2] - box[0])\n",
    "\n",
    "    # want to preserve aspect ratio but make image square, so do padding\n",
    "    if height > width:\n",
    "        delta = height - width\n",
    "        left, right = delta // 2, delta - (delta // 2)\n",
    "        top = bottom = 0\n",
    "    else:\n",
    "        delta = width - height\n",
    "        top, bottom = delta // 2, delta - (delta // 2)\n",
    "        left = right = 0\n",
    "\n",
    "    pad_color = [0, 0, 0]  # black padding\n",
    "    new_img = cv2.copyMakeBorder(cropped_img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=pad_color)\n",
    "    return new_img\n",
    "\n",
    "\n",
    "def process_image(img_path, model_path, box_index=-1):\n",
    "    \"\"\"Detect the gauge face in an image file, crop to it and resize to 448x448.\n",
    "\n",
    "    :param img_path: path of the image file to process\n",
    "    :param model_path: path to the YOLO weights file\n",
    "    :param box_index: index of the detected box to use; negative selects the first\n",
    "    :return: (resized cropped image, all detected boxes, original image)\n",
    "    :raises FileNotFoundError: if the image cannot be read\n",
    "    \"\"\"\n",
    "    image = cv2.imread(img_path)\n",
    "    # cv2.imread returns None instead of raising on a missing/unreadable file\n",
    "    if image is None:\n",
    "        raise FileNotFoundError(f\"could not read image {img_path}\")\n",
    "\n",
    "    # Gauge detection\n",
    "    box, boxes = detection_gauge_face(image, img_path, model_path, box_index=box_index)\n",
    "\n",
    "    # crop image to only gauge face\n",
    "    cropped_img = crop_image(image, box)\n",
    "\n",
    "    resolution = (448, 448)\n",
    "    resized_img = cv2.resize(cropped_img, resolution, interpolation=cv2.INTER_LINEAR)\n",
    "\n",
    "    return resized_img, boxes, image\n",
    "\n",
    "def get_files_from_folder(folder):\n",
    "    filenames = []\n",
    "    for filename in os.listdir(folder):\n",
    "        filenames.append(filename)\n",
    "    return filenames\n",
    "\n",
    "def crop_and_save_img(filename, src_dir, dest_dir, model_path, box_index=-1):\n",
    "    \"\"\"Crop one image to its detected gauge face and save it to *dest_dir*.\n",
    "\n",
    "    The output file is named 'cropped_' + filename.\n",
    "    :param filename: name of the image file inside src_dir\n",
    "    :param src_dir: directory containing the source image\n",
    "    :param dest_dir: directory to write the cropped image to\n",
    "    :param model_path: path to the YOLO weights file\n",
    "    :param box_index: index of the detected box to use; negative selects the first\n",
    "    \"\"\"\n",
    "    # use os.path.join instead of string concatenation (consistent with dest path,\n",
    "    # and works whether or not src_dir has a trailing slash)\n",
    "    img_path = os.path.join(src_dir, filename)\n",
    "\n",
    "    cropped_img, boxes, image = process_image(img_path, model_path, box_index)\n",
    "\n",
    "    new_file_path = os.path.join(dest_dir, 'cropped_' + filename)\n",
    "    cv2.imwrite(new_file_path, cropped_img)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "454945ab",
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "# use the Tk backend so figures open in a window (requires a GUI session)\n",
    "matplotlib.use( 'tkagg' )\n",
    "\n",
    "def plot_bounding_box_img(img, boxes):\n",
    "    \"\"\"\n",
    "    plot detected bounding boxes. boxes is the result of the yolov8 detection\n",
    "    :param img: image to draw bounding boxes on (BGR, as read by cv2)\n",
    "    :param boxes: list of bounding boxes\n",
    "    \"\"\"\n",
    "    # draw on a copy so the caller's image is not modified in place\n",
    "    img = img.copy()\n",
    "    for box in boxes:\n",
    "        bbox = box.xyxy[0].int()\n",
    "        start_point = (int(bbox[0]), int(bbox[1]))\n",
    "        end_point = (int(bbox[2]), int(bbox[3]))\n",
    "\n",
    "        color_face = (0, 255, 0)\n",
    "        color_needle = (255, 0, 0)\n",
    "        if box.cls == 0:  # class 0 is drawn with the face color, others with the needle color\n",
    "            color = color_face\n",
    "        else:\n",
    "            color = color_needle\n",
    "\n",
    "        img = cv2.rectangle(img,\n",
    "                            start_point,\n",
    "                            end_point,\n",
    "                            color=color,\n",
    "                            thickness=5)\n",
    "\n",
    "    plt.figure()\n",
    "    # cv2 images are BGR but matplotlib expects RGB; convert so colors render correctly\n",
    "    plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))\n",
    "    plt.show()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "a22ff193",
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE(review): these relative paths are immediately overridden by the next cell;\n",
    "# this cell appears to be dead/superseded -- consider deleting it\n",
    "image_directory = 'data/key_point_train/images_original_new/'\n",
    "new_image_directory = 'data/key_point_train/images_cropped_new/'\n",
    "model_path = \"gauge_detection_model.pt\"\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e2409d0e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# configuration: source/destination directories (relative to this notebook)\n",
    "# and the gauge-detection model weights\n",
    "image_directory = '../../data/key_point_train/images_original_new/'\n",
    "new_image_directory = '../../data/key_point_train/images_cropped_new/'\n",
    "model_path = \"gauge_detection_model.pt\"\n",
    "\n",
    "test_file_names = get_files_from_folder(image_directory)\n",
    "\n",
    "os.makedirs(new_image_directory, exist_ok=True)\n",
    "\n",
    "# crop every image to its detected gauge face and save to the destination folder\n",
    "for filename in test_file_names:\n",
    "    crop_and_save_img(filename, image_directory, new_image_directory, model_path)\n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "5f198cd6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# spot-check a single image: inspect the second detected box (box_index=1)\n",
    "filename = '20220826_205802561_iOS_jpg.rf.0a00bb7a3ba8166691630082e6fb9d69.jpg'\n",
    "\n",
    "cropped_img, boxes, image = process_image(image_directory + filename, model_path, box_index=1)\n",
    "\n",
    "# show all detected boxes on the original, then the cropped result in an OpenCV window\n",
    "plot_bounding_box_img(image, boxes)\n",
    "cv2.imshow('image', cropped_img)\n",
    "cv2.waitKey(0)  # block until a key is pressed in the image window\n",
    "# closing all open windows\n",
    "cv2.destroyAllWindows()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ac2183e4",
   "metadata": {},
   "outputs": [],
   "source": [
    "# re-crop the spot-checked file using the second detected box (box_index=1)\n",
    "crop_and_save_img(filename, image_directory, new_image_directory, model_path, 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0e0b77ff",
   "metadata": {},
   "outputs": [],
   "source": [
    "# prefix every cropped image with a running index (1_, 2_, ...)\n",
    "# NOTE: running this cell twice adds a second prefix to each file\n",
    "for i, filename in enumerate(os.listdir(new_image_directory), start=1):\n",
    "    # os.path.join instead of string concatenation, consistent with the rest of the notebook\n",
    "    os.rename(os.path.join(new_image_directory, filename),\n",
    "              os.path.join(new_image_directory, str(i) + \"_\" + filename))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6d7b45e1",
   "metadata": {},
   "outputs": [],
   "source": [
    "# resize every cropped image in place to the model input resolution\n",
    "resolution = (448, 448)\n",
    "for filename in os.listdir(new_image_directory):\n",
    "    img_path = os.path.join(new_image_directory, filename)\n",
    "    image = cv2.imread(img_path)\n",
    "    if image is None:  # cv2.imread returns None for files it cannot decode; skip them\n",
    "        continue\n",
    "    resized_img = cv2.resize(image, resolution, interpolation=cv2.INTER_LINEAR)\n",
    "    cv2.imwrite(img_path, resized_img)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2d15d9ef",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
