{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "aa68f682",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import os\n",
    "import networkx as nx\n",
    "from scipy.ndimage import zoom\n",
    "def load_and_validate_data(file_path):\n",
    "    \"\"\"\n",
    "    Load data from a file and return it.\n",
    "    Return None if data is missing.\n",
    "    \"\"\"\n",
    "    try:\n",
    "        loaded_data = np.load(file_path)\n",
    "        \n",
    "        ppg_f = loaded_data.get('ppg_f')\n",
    "        ecg_f = loaded_data.get('ecg_f')\n",
    "        seg_dbp = loaded_data.get('seg_dbp')\n",
    "        seg_sbp = loaded_data.get('seg_sbp')\n",
    "        \n",
    "        \n",
    "        if ppg_f is None or ecg_f is None or seg_dbp is None or seg_sbp is None:\n",
    "            return None\n",
    "\n",
    "        return ppg_f, ecg_f, seg_dbp, seg_sbp\n",
    "    \n",
    "    except Exception as e:\n",
    "        print(f\"Error loading {file_path}: {e}\")\n",
    "        return None\n",
    "\n",
    "def combine_data_from_folder(folder_path, batch_size=100):\n",
    "    \"\"\"\n",
    "    Combine data from all valid .npz files in the folder in batches.\n",
    "\n",
    "    Yields (ppg, ecg, seg_dbp, seg_sbp) tuples of arrays concatenated\n",
    "    over batch_size files; a final (possibly empty) batch is always\n",
    "    yielded for whatever remains.\n",
    "    \"\"\"\n",
    "    def concat_or_empty(chunks):\n",
    "        # Concatenate accumulated per-file arrays; empty array otherwise.\n",
    "        return np.concatenate(chunks, axis=0) if chunks else np.array([])\n",
    "\n",
    "    buffers = ([], [], [], [])  # ppg, ecg, seg_dbp, seg_sbp\n",
    "\n",
    "    # sorted(): os.listdir order is platform-dependent; a deterministic\n",
    "    # order keeps batch numbering stable across runs, which the\n",
    "    # resume-by-existing-output-file logic downstream relies on.\n",
    "    for file_name in sorted(os.listdir(folder_path)):\n",
    "        file_path = os.path.join(folder_path, file_name)\n",
    "\n",
    "        if not file_path.endswith('.npz'):\n",
    "            continue\n",
    "\n",
    "        data = load_and_validate_data(file_path)\n",
    "\n",
    "        if data is None:\n",
    "            print(f\"Skipping invalid file: {file_path}\")\n",
    "            continue\n",
    "\n",
    "        for buffer, array in zip(buffers, data):\n",
    "            buffer.append(array)\n",
    "\n",
    "        if len(buffers[0]) >= batch_size:\n",
    "            yield tuple(np.concatenate(chunks, axis=0) for chunks in buffers)\n",
    "            buffers = ([], [], [], [])\n",
    "\n",
    "    # Always yield the remainder, mirroring the original behavior of a\n",
    "    # final (possibly empty) batch.\n",
    "    yield tuple(concat_or_empty(chunks) for chunks in buffers)\n",
    "# NOTE(review): hard-coded absolute Windows paths -- consider deriving\n",
    "# these from a configurable base directory so the notebook is portable.\n",
    "train_dir = 'C:\\\\Users\\\\nihal\\\\Desktop\\\\NIHAL_IMP_DOCS\\\\Internship_PPG\\\\Train_data'\n",
    "val_dir = 'C:\\\\Users\\\\nihal\\\\Desktop\\\\NIHAL_IMP_DOCS\\\\Internship_PPG\\\\Validation_data'\n",
    "test_dir = 'C:\\\\Users\\\\nihal\\\\Desktop\\\\NIHAL_IMP_DOCS\\\\Internship_PPG\\\\Test_data'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "e9cee2b3",
   "metadata": {},
   "outputs": [],
   "source": [
    "def create_visibility_graph(ppg_signal):\n",
    "    \"\"\"Build the natural visibility graph of a 1-D signal.\n",
    "\n",
    "    Nodes are sample indices; an edge (i, j) exists when no intermediate\n",
    "    sample reaches the straight line joining (i, signal[i]) and\n",
    "    (j, signal[j]).\n",
    "    \"\"\"\n",
    "    num_samples = len(ppg_signal)\n",
    "    graph = nx.Graph()\n",
    "    graph.add_nodes_from(range(num_samples))\n",
    "\n",
    "    for left in range(num_samples):\n",
    "        for right in range(left + 1, num_samples):\n",
    "            # Same arithmetic order as the visibility criterion above to\n",
    "            # keep floating-point results identical.\n",
    "            blocked = any(\n",
    "                ppg_signal[mid] >= ppg_signal[left] + (ppg_signal[right] - ppg_signal[left]) * (mid - left) / (right - left)\n",
    "                for mid in range(left + 1, right)\n",
    "            )\n",
    "            if not blocked:\n",
    "                graph.add_edge(left, right)\n",
    "\n",
    "    return graph\n",
    "\n",
    "def graph_to_adjacency_matrix_image(G, size):\n",
    "    \"\"\"Resize the graph's adjacency matrix to a size x size image.\"\"\"\n",
    "    adjacency = nx.to_numpy_array(G)\n",
    "    row_scale = size / adjacency.shape[0]\n",
    "    col_scale = size / adjacency.shape[1]\n",
    "    # order=0 (nearest neighbour) keeps the entries binary.\n",
    "    return zoom(adjacency, (row_scale, col_scale), order=0)\n",
    "\n",
    "def graph_to_flattened_adjacency_matrix(G, size):\n",
    "    \"\"\"Return the resized adjacency matrix flattened to 1-D.\"\"\"\n",
    "    # Reuse the shared resize helper instead of duplicating the zoom call.\n",
    "    adj_matrix_resized = graph_to_adjacency_matrix_image(G, size)\n",
    "    # zoom targets exactly size x size, so this slice is a defensive no-op.\n",
    "    return adj_matrix_resized.flatten()[:size * size]\n",
    "\n",
    "def generate_vg_image(ppg_signal, size):\n",
    "    \"\"\"Build the visibility graph of ppg_signal and render it as a\n",
    "    size x size adjacency-matrix image.\"\"\"\n",
    "    G = create_visibility_graph(ppg_signal)\n",
    "    vg_image = graph_to_adjacency_matrix_image(G, size)\n",
    "    return vg_image\n",
    "\n",
    "def process_signal(i, ppg_signal, vg_image_size):\n",
    "    \"\"\"\n",
    "    Generate a flattened VG image for one PPG segment.\n",
    "\n",
    "    The index i is kept for executor.map bookkeeping / debug logging.\n",
    "    \"\"\"\n",
    "    flat_signal = ppg_signal.flatten()\n",
    "    vg_image = generate_vg_image(flat_signal, vg_image_size)\n",
    "    return vg_image.flatten()\n",
    "\n",
    "# Side length of the square VG adjacency-matrix images (224 matches\n",
    "# common CNN input sizes -- presumably chosen for that; confirm).\n",
    "vg_image_size=224"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "4c403ee5",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "from scipy.signal import find_peaks\n",
    "\n",
    "def select_n_peak_window(ppg_signal, n_peaks=3):\n",
    "    ppg_3peak=[]\n",
    "    for ppg in ppg_signal:\n",
    "        x=ppg.flatten()\n",
    "        peaks, _ = find_peaks(x, distance=50)  \n",
    "    \n",
    "    \n",
    "    \n",
    "        if len(peaks) < n_peaks:\n",
    "            raise ValueError(f\"Not enough peaks detected. Detected peaks: {len(peaks)}\")\n",
    "    \n",
    "    \n",
    "        start_index = peaks[0]\n",
    "        end_index = peaks[n_peaks - 1]  \n",
    "    \n",
    "    \n",
    "        ppg_window = x[start_index:end_index + 1]\n",
    "        ppg_3peak.append(np.array(ppg_window))\n",
    "    \n",
    "    return ppg_3peak\n",
    "\n",
    "def reshape_ppg_3_peaks(ppg_signal):\n",
    "    ppg_3_peak_reshaped = []\n",
    "\n",
    "    for ppg_window in ppg_signal:\n",
    "        reshaped_window = ppg_window.reshape(1, -1)  \n",
    "        ppg_3_peak_reshaped.append(np.array(reshaped_window))\n",
    "        \n",
    "    return ppg_3_peak_reshaped"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "83a67182",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Lazily stream the test set: each yielded chunk concatenates 10 files.\n",
    "test_data_generator = combine_data_from_folder(test_dir, batch_size=10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "50dd28fc",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Batch 1 already processed. Skipping...\n",
      "Batch 2 already processed. Skipping...\n",
      "Batch 3 already processed. Skipping...\n",
      "Batch 4 already processed. Skipping...\n",
      "Batch 5 already processed. Skipping...\n",
      "Processing Batch 6...\n",
      "7198\n"
     ]
    },
    {
     "ename": "MemoryError",
     "evalue": "Unable to allocate 2.69 GiB for an array with shape (7198, 50176) and data type float64",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mMemoryError\u001b[0m                               Traceback (most recent call last)",
      "Cell \u001b[1;32mIn[5], line 26\u001b[0m\n\u001b[0;32m     21\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m ThreadPoolExecutor() \u001b[38;5;28;01mas\u001b[39;00m executor:\n\u001b[0;32m     23\u001b[0m     vg_images \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mlist\u001b[39m(executor\u001b[38;5;241m.\u001b[39mmap(process_signal, \u001b[38;5;28mrange\u001b[39m(\u001b[38;5;28mlen\u001b[39m(ppg_peaks_3_reshaped)), ppg_peaks_3_reshaped, [vg_image_size]\u001b[38;5;241m*\u001b[39m\u001b[38;5;28mlen\u001b[39m(ppg_peaks_3_reshaped)))\n\u001b[1;32m---> 26\u001b[0m \u001b[43mnp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msavez_compressed\u001b[49m\u001b[43m(\u001b[49m\u001b[43moutput_file\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvg_images\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mvg_images\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m     27\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mBatch \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mbatch_idx\u001b[38;5;250m \u001b[39m\u001b[38;5;241m+\u001b[39m\u001b[38;5;250m \u001b[39m\u001b[38;5;241m1\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m processing complete.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
      "File \u001b[1;32m<__array_function__ internals>:200\u001b[0m, in \u001b[0;36msavez_compressed\u001b[1;34m(*args, **kwargs)\u001b[0m\n",
      "File \u001b[1;32m~\\anaconda3\\envs\\intern\\lib\\site-packages\\numpy\\lib\\npyio.py:686\u001b[0m, in \u001b[0;36msavez_compressed\u001b[1;34m(file, *args, **kwds)\u001b[0m\n\u001b[0;32m    623\u001b[0m \u001b[38;5;129m@array_function_dispatch\u001b[39m(_savez_compressed_dispatcher)\n\u001b[0;32m    624\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21msavez_compressed\u001b[39m(file, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwds):\n\u001b[0;32m    625\u001b[0m \u001b[38;5;250m    \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m    626\u001b[0m \u001b[38;5;124;03m    Save several arrays into a single file in compressed ``.npz`` format.\u001b[39;00m\n\u001b[0;32m    627\u001b[0m \n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    684\u001b[0m \n\u001b[0;32m    685\u001b[0m \u001b[38;5;124;03m    \"\"\"\u001b[39;00m\n\u001b[1;32m--> 686\u001b[0m     \u001b[43m_savez\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfile\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mkwds\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\n",
      "File \u001b[1;32m~\\anaconda3\\envs\\intern\\lib\\site-packages\\numpy\\lib\\npyio.py:716\u001b[0m, in \u001b[0;36m_savez\u001b[1;34m(file, args, kwds, compress, allow_pickle, pickle_kwargs)\u001b[0m\n\u001b[0;32m    714\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m key, val \u001b[38;5;129;01min\u001b[39;00m namedict\u001b[38;5;241m.\u001b[39mitems():\n\u001b[0;32m    715\u001b[0m     fname \u001b[38;5;241m=\u001b[39m key \u001b[38;5;241m+\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m.npy\u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m--> 716\u001b[0m     val \u001b[38;5;241m=\u001b[39m \u001b[43mnp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43masanyarray\u001b[49m\u001b[43m(\u001b[49m\u001b[43mval\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    717\u001b[0m     \u001b[38;5;66;03m# always force zip64, gh-10776\u001b[39;00m\n\u001b[0;32m    718\u001b[0m     \u001b[38;5;28;01mwith\u001b[39;00m zipf\u001b[38;5;241m.\u001b[39mopen(fname, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mw\u001b[39m\u001b[38;5;124m'\u001b[39m, force_zip64\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m fid:\n",
      "\u001b[1;31mMemoryError\u001b[0m: Unable to allocate 2.69 GiB for an array with shape (7198, 50176) and data type float64"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "import os\n",
    "from concurrent.futures import ThreadPoolExecutor\n",
    "\n",
    "\n",
    "output_dir = 'Test_VG'\n",
    "os.makedirs(output_dir, exist_ok=True)\n",
    "\n",
    "\n",
    "for batch_idx, (combined_ppg_batch, combined_ecg_batch, combined_seg_dbp_batch, combined_seg_sbp_batch) in enumerate(test_data_generator):\n",
    "    \n",
    "    output_file = os.path.join(output_dir, f'Test_VG_batch_{batch_idx + 1}.npz')\n",
    "    if os.path.exists(output_file):\n",
    "        print(f\"Batch {batch_idx + 1} already processed. Skipping...\")\n",
    "        continue\n",
    "        \n",
    "    print(f\"Processing Batch {batch_idx + 1}...\")\n",
    "    print(len(combined_seg_dbp_batch))\n",
    "    ppg_peaks_3=select_n_peak_window(combined_ppg_batch)\n",
    "    ppg_peaks_3_reshaped=reshape_ppg_3_peaks(ppg_peaks_3)\n",
    "    with ThreadPoolExecutor() as executor:\n",
    "        \n",
    "        vg_images = list(executor.map(process_signal, range(len(ppg_peaks_3_reshaped)), ppg_peaks_3_reshaped, [vg_image_size]*len(ppg_peaks_3_reshaped)))\n",
    "        \n",
    "    \n",
    "    np.savez_compressed(output_file, vg_images=vg_images)\n",
    "    print(f\"Batch {batch_idx + 1} processing complete.\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6dcff8d9",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.19"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
