{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "15454d71",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import os\n",
    "import networkx as nx\n",
    "from scipy.ndimage import zoom\n",
    "def load_and_validate_data(file_path):\n",
    "    \"\"\"\n",
    "    Load one .npz recording and return its four signal arrays.\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    file_path : str\n",
    "        Path to a .npz file expected to hold the keys\n",
    "        'ppg_f', 'ecg_f', 'seg_dbp' and 'seg_sbp'.\n",
    "\n",
    "    Returns\n",
    "    -------\n",
    "    tuple of np.ndarray or None\n",
    "        (ppg_f, ecg_f, seg_dbp, seg_sbp); None when any key is\n",
    "        missing or the file cannot be read.\n",
    "    \"\"\"\n",
    "    try:\n",
    "        # Context manager closes the NpzFile's underlying zip handle;\n",
    "        # the previous bare np.load(...) leaked it on every call.\n",
    "        with np.load(file_path) as loaded_data:\n",
    "            arrays = tuple(loaded_data.get(key)\n",
    "                           for key in ('ppg_f', 'ecg_f', 'seg_dbp', 'seg_sbp'))\n",
    "\n",
    "        # Reject the file when any expected array is absent.\n",
    "        if any(arr is None for arr in arrays):\n",
    "            return None\n",
    "\n",
    "        return arrays\n",
    "    \n",
    "    except Exception as e:\n",
    "        print(f\"Error loading {file_path}: {e}\")\n",
    "        return None\n",
    "\n",
    "def combine_data_from_folder(folder_path, batch_size=100):\n",
    "    \"\"\"\n",
    "    Stream the arrays of every valid .npz file in `folder_path` as batches.\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    folder_path : str\n",
    "        Directory holding the per-record .npz files.\n",
    "    batch_size : int, optional\n",
    "        Number of files accumulated before a batch is yielded.\n",
    "\n",
    "    Yields\n",
    "    ------\n",
    "    tuple of np.ndarray\n",
    "        (ppg, ecg, seg_dbp, seg_sbp), each the axis-0 concatenation of\n",
    "        the arrays from up to `batch_size` files.  A smaller final batch\n",
    "        is yielded for leftovers; nothing is yielded when no valid file\n",
    "        exists (previously a spurious batch of empty arrays was emitted\n",
    "        whenever the file count was an exact multiple of batch_size).\n",
    "    \"\"\"\n",
    "    buffers = ([], [], [], [])  # ppg, ecg, seg_dbp, seg_sbp accumulators\n",
    "\n",
    "    def flush():\n",
    "        # Concatenate each accumulated list along axis 0 and reset it.\n",
    "        batch = tuple(np.concatenate(buf, axis=0) for buf in buffers)\n",
    "        for buf in buffers:\n",
    "            buf.clear()\n",
    "        return batch\n",
    "\n",
    "    for file_name in os.listdir(folder_path):\n",
    "        file_path = os.path.join(folder_path, file_name)\n",
    "\n",
    "        if not file_path.endswith('.npz'):\n",
    "            continue\n",
    "\n",
    "        data = load_and_validate_data(file_path)\n",
    "\n",
    "        if data is None:\n",
    "            print(f\"Skipping invalid file: {file_path}\")\n",
    "            continue\n",
    "\n",
    "        for buf, arr in zip(buffers, data):\n",
    "            buf.append(arr)\n",
    "\n",
    "        if len(buffers[0]) >= batch_size:\n",
    "            yield flush()\n",
    "\n",
    "    # Flush any remaining partial batch instead of yielding empty arrays.\n",
    "    if buffers[0]:\n",
    "        yield flush()\n",
    "# NOTE(review): hardcoded absolute Windows paths; consider a configurable\n",
    "# base directory (e.g. via pathlib / an environment variable).\n",
    "train_dir = 'C:\\\\Users\\\\nihal\\\\Desktop\\\\NIHAL_IMP_DOCS\\\\Internship_PPG\\\\Train_data'\n",
    "val_dir = 'C:\\\\Users\\\\nihal\\\\Desktop\\\\NIHAL_IMP_DOCS\\\\Internship_PPG\\\\Validation_data'\n",
    "test_dir = 'C:\\\\Users\\\\nihal\\\\Desktop\\\\NIHAL_IMP_DOCS\\\\Internship_PPG\\\\Test_data'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "79e4e4e3",
   "metadata": {},
   "outputs": [],
   "source": [
    "def create_visibility_graph(ppg_signal):\n",
    "    \"\"\"\n",
    "    Build the natural visibility graph of a 1-D signal.\n",
    "\n",
    "    Samples i < j are connected when every intermediate sample lies\n",
    "    strictly below the straight line joining (i, y_i) and (j, y_j).\n",
    "    \"\"\"\n",
    "    num_points = len(ppg_signal)\n",
    "    graph = nx.Graph()\n",
    "    graph.add_nodes_from(range(num_points))\n",
    "\n",
    "    for left in range(num_points):\n",
    "        for right in range(left + 1, num_points):\n",
    "            # Visible only if every in-between sample stays under the chord.\n",
    "            if all(ppg_signal[mid] < ppg_signal[left] + (ppg_signal[right] - ppg_signal[left]) * (mid - left) / (right - left)\n",
    "                   for mid in range(left + 1, right)):\n",
    "                graph.add_edge(left, right)\n",
    "\n",
    "    return graph\n",
    "\n",
    "def graph_to_adjacency_matrix_image(G, size):\n",
    "    \"\"\"Resample G's adjacency matrix to a size x size image.\"\"\"\n",
    "    adjacency = nx.to_numpy_array(G)\n",
    "    factors = (size / adjacency.shape[0], size / adjacency.shape[1])\n",
    "    # order=0 (nearest neighbour) keeps the 0/1 structure of the matrix.\n",
    "    return zoom(adjacency, factors, order=0)\n",
    "\n",
    "def graph_to_flattened_adjacency_matrix(G, size):\n",
    "    \"\"\"Resize G's adjacency matrix to size x size and return it as a 1-D vector.\"\"\"\n",
    "    adjacency = nx.to_numpy_array(G)\n",
    "    factors = (size / adjacency.shape[0], size / adjacency.shape[1])\n",
    "    resized = zoom(adjacency, factors, order=0)\n",
    "    # Truncate defensively in case rounding inside zoom over-sizes the output.\n",
    "    return resized.flatten()[:size * size]\n",
    "\n",
    "def generate_vg_image(ppg_signal, size):\n",
    "    \"\"\"Turn a 1-D PPG signal into a flattened size x size visibility-graph image.\"\"\"\n",
    "    graph = create_visibility_graph(ppg_signal)\n",
    "    return graph_to_adjacency_matrix_image(graph, size).flatten()\n",
    "\n",
    "def process_signal(i, ppg_signal, vg_image_size):\n",
    "    \"\"\"\n",
    "    Generate the VG image for one PPG signal.\n",
    "\n",
    "    `i` is used only for the progress message.\n",
    "    \"\"\"\n",
    "    print(f\"VG img {i + 1}\")\n",
    "    flat_signal = ppg_signal.flatten()\n",
    "    return generate_vg_image(flat_signal, vg_image_size)\n",
    "\n",
    "vg_image_size=224"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "e9db5d55",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "from scipy.signal import find_peaks\n",
    "\n",
    "def select_n_peak_window(ppg_signal, n_peaks=3):\n",
    "    \"\"\"\n",
    "    Crop every PPG trace to the window spanning its first `n_peaks` peaks.\n",
    "\n",
    "    Raises ValueError when a trace has fewer than `n_peaks` detected peaks.\n",
    "    \"\"\"\n",
    "    windows = []\n",
    "    for trace in ppg_signal:\n",
    "        flat = trace.flatten()\n",
    "        # distance=50 suppresses secondary maxima within 50 samples of a peak.\n",
    "        peaks, _ = find_peaks(flat, distance=50)\n",
    "\n",
    "        if len(peaks) < n_peaks:\n",
    "            raise ValueError(f\"Not enough peaks detected. Detected peaks: {len(peaks)}\")\n",
    "\n",
    "        first, last = peaks[0], peaks[n_peaks - 1]\n",
    "        windows.append(np.array(flat[first:last + 1]))\n",
    "    return windows\n",
    "\n",
    "def reshape_ppg_3_peaks(ppg_signal):\n",
    "    \"\"\"Reshape every peak window in `ppg_signal` to a single-row (1, N) array.\"\"\"\n",
    "    return [np.array(window.reshape(1, -1)) for window in ppg_signal]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "99d12d39",
   "metadata": {},
   "outputs": [],
   "source": [
    "train_data_generator = combine_data_from_folder(train_dir, batch_size=10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "17bf2b44",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Processing Batch 1...\n",
      "5078\n",
      "VG img 1\n",
      "VG img 2\n",
      "VG img 3\n",
      "VG img 4\n",
      "VG img 5\n",
      "VG img 6\n",
      "VG img 7\n",
      "VG img 8\n",
      "VG img 9\n",
      "VG img 10\n",
      "VG img 11\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "import os\n",
    "from concurrent.futures import ThreadPoolExecutor\n",
    "\n",
    "\n",
    "# Output batches are written once; existing files are skipped, so the cell\n",
    "# can be re-run after an interruption and resume where it stopped.\n",
    "output_dir = 'Train_VG'\n",
    "os.makedirs(output_dir, exist_ok=True)\n",
    "\n",
    "for batch_idx, (combined_ppg_batch, combined_ecg_batch, combined_seg_dbp_batch, combined_seg_sbp_batch) in enumerate(train_data_generator):\n",
    "    \n",
    "    output_file = os.path.join(output_dir, f'Train_VG_FP_batch_{batch_idx + 1}.npz')\n",
    "    if os.path.exists(output_file):\n",
    "        print(f\"Batch {batch_idx + 1} already processed. Skipping...\")\n",
    "        continue\n",
    "        \n",
    "    print(f\"Processing Batch {batch_idx + 1}...\")\n",
    "    print(len(combined_seg_dbp_batch))\n",
    "    # NOTE(review): unlike the validation/test cells below, the 3-peak\n",
    "    # windowing is disabled here, so training VG images are built from the\n",
    "    # full PPG segments - confirm this asymmetry is intended.\n",
    "    #ppg_peaks_3=select_n_peak_window(combined_ppg_batch)\n",
    "    #ppg_peaks_3_reshaped=reshape_ppg_3_peaks(ppg_peaks_3)\n",
    "    # NOTE(review): the VG construction is pure Python, so two threads may\n",
    "    # give little speedup under the GIL - verify before scaling up.\n",
    "    with ThreadPoolExecutor(max_workers=2) as executor:\n",
    "        \n",
    "        vg_images = list(executor.map(process_signal, range(len(combined_ppg_batch)), combined_ppg_batch, [vg_image_size]*len(combined_ppg_batch)))\n",
    "        \n",
    "    \n",
    "    np.savez_compressed(output_file, vg_images=vg_images)\n",
    "    print(f\"Batch {batch_idx + 1} processing complete.\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "bee6fcf5",
   "metadata": {},
   "outputs": [],
   "source": [
    "val_data_generator = combine_data_from_folder(val_dir, batch_size=10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "07e73cd5",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import os\n",
    "from concurrent.futures import ThreadPoolExecutor\n",
    "\n",
    "\n",
    "# Output batches are written once; existing files are skipped, so the cell\n",
    "# can be re-run after an interruption and resume where it stopped.\n",
    "output_dir = 'Val_VG'\n",
    "os.makedirs(output_dir, exist_ok=True)\n",
    "\n",
    "\n",
    "for batch_idx, (combined_ppg_batch, combined_ecg_batch, combined_seg_dbp_batch, combined_seg_sbp_batch) in enumerate(val_data_generator):\n",
    "    \n",
    "    output_file = os.path.join(output_dir, f'Val_VG_batch_{batch_idx + 1}.npz')\n",
    "    if os.path.exists(output_file):\n",
    "        print(f\"Batch {batch_idx + 1} already processed. Skipping...\")\n",
    "        continue\n",
    "        \n",
    "    print(f\"Processing Batch {batch_idx + 1}...\")\n",
    "    print(len(combined_seg_dbp_batch))\n",
    "    # NOTE(review): select_n_peak_window raises ValueError when a trace has\n",
    "    # fewer than 3 detected peaks, which aborts the entire batch run -\n",
    "    # consider skipping such traces instead.\n",
    "    ppg_peaks_3=select_n_peak_window(combined_ppg_batch)\n",
    "    ppg_peaks_3_reshaped=reshape_ppg_3_peaks(ppg_peaks_3)\n",
    "    with ThreadPoolExecutor(max_workers=2) as executor:\n",
    "        \n",
    "        vg_images = list(executor.map(process_signal, range(len(ppg_peaks_3_reshaped)), ppg_peaks_3_reshaped, [vg_image_size]*len(ppg_peaks_3_reshaped)))\n",
    "        \n",
    "    \n",
    "    np.savez_compressed(output_file, vg_images=vg_images)\n",
    "    print(f\"Batch {batch_idx + 1} processing complete.\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "79223d7c",
   "metadata": {},
   "outputs": [],
   "source": [
    "test_data_generator = combine_data_from_folder(test_dir, batch_size=10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d5f8a2fa",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import os\n",
    "from concurrent.futures import ThreadPoolExecutor\n",
    "\n",
    "\n",
    "# Output batches are written once; existing files are skipped, so the cell\n",
    "# can be re-run after an interruption and resume where it stopped.\n",
    "output_dir = 'Test_VG'\n",
    "os.makedirs(output_dir, exist_ok=True)\n",
    "\n",
    "\n",
    "for batch_idx, (combined_ppg_batch, combined_ecg_batch, combined_seg_dbp_batch, combined_seg_sbp_batch) in enumerate(test_data_generator):\n",
    "    \n",
    "    output_file = os.path.join(output_dir, f'Test_VG_batch_{batch_idx + 1}.npz')\n",
    "    if os.path.exists(output_file):\n",
    "        print(f\"Batch {batch_idx + 1} already processed. Skipping...\")\n",
    "        continue\n",
    "        \n",
    "    print(f\"Processing Batch {batch_idx + 1}...\")\n",
    "    print(len(combined_seg_dbp_batch))\n",
    "    # NOTE(review): select_n_peak_window raises ValueError when a trace has\n",
    "    # fewer than 3 detected peaks, which aborts the entire batch run -\n",
    "    # consider skipping such traces instead.\n",
    "    ppg_peaks_3=select_n_peak_window(combined_ppg_batch)\n",
    "    ppg_peaks_3_reshaped=reshape_ppg_3_peaks(ppg_peaks_3)\n",
    "    # NOTE(review): no max_workers here (defaults to a CPU-count-based pool),\n",
    "    # unlike the max_workers=2 used for train/validation - confirm the\n",
    "    # inconsistency is intentional.\n",
    "    with ThreadPoolExecutor() as executor:\n",
    "        \n",
    "        vg_images = list(executor.map(process_signal, range(len(ppg_peaks_3_reshaped)), ppg_peaks_3_reshaped, [vg_image_size]*len(ppg_peaks_3_reshaped)))\n",
    "        \n",
    "    \n",
    "    np.savez_compressed(output_file, vg_images=vg_images)\n",
    "    print(f\"Batch {batch_idx + 1} processing complete.\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c2958e46",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.19"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
