{"metadata":{"kernelspec":{"language":"python","display_name":"Python 3","name":"python3"},"language_info":{"name":"python","version":"3.7.10","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"markdown","source":"# Importing Libraries","metadata":{}},{"cell_type":"code","source":"! pip install biosppy    ","metadata":{"execution":{"iopub.status.busy":"2021-07-29T10:23:13.109551Z","iopub.execute_input":"2021-07-29T10:23:13.109985Z","iopub.status.idle":"2021-07-29T10:23:24.845263Z","shell.execute_reply.started":"2021-07-29T10:23:13.109900Z","shell.execute_reply":"2021-07-29T10:23:24.843995Z"},"trusted":true},"execution_count":1,"outputs":[{"name":"stdout","text":"Collecting biosppy\n  Downloading biosppy-0.7.3.tar.gz (85 kB)\n\u001b[K     |████████████████████████████████| 85 kB 1.4 MB/s eta 0:00:011\n\u001b[?25hCollecting bidict\n  Downloading bidict-0.21.2-py2.py3-none-any.whl (37 kB)\nRequirement already satisfied: h5py in /opt/conda/lib/python3.7/site-packages (from biosppy) (2.10.0)\nRequirement already satisfied: matplotlib in /opt/conda/lib/python3.7/site-packages (from biosppy) (3.4.2)\nRequirement already satisfied: numpy in /opt/conda/lib/python3.7/site-packages (from biosppy) (1.19.5)\nRequirement already satisfied: scikit-learn in /opt/conda/lib/python3.7/site-packages (from biosppy) (0.23.2)\nRequirement already satisfied: scipy in /opt/conda/lib/python3.7/site-packages (from biosppy) (1.6.3)\nRequirement already satisfied: shortuuid in /opt/conda/lib/python3.7/site-packages (from biosppy) (1.0.1)\nRequirement already satisfied: six in /opt/conda/lib/python3.7/site-packages (from biosppy) (1.15.0)\nRequirement already satisfied: joblib in /opt/conda/lib/python3.7/site-packages (from biosppy) (1.0.1)\nRequirement already satisfied: opencv-python in /opt/conda/lib/python3.7/site-packages (from biosppy) (4.5.2.54)\nRequirement already satisfied: kiwisolver>=1.0.1 in /opt/conda/lib/python3.7/site-packages (from matplotlib->biosppy) (1.3.1)\nRequirement already satisfied: pyparsing>=2.2.1 in /opt/conda/lib/python3.7/site-packages (from matplotlib->biosppy) (2.4.7)\nRequirement already satisfied: python-dateutil>=2.7 in /opt/conda/lib/python3.7/site-packages (from matplotlib->biosppy) (2.8.1)\nRequirement already satisfied: cycler>=0.10 in /opt/conda/lib/python3.7/site-packages (from matplotlib->biosppy) (0.10.0)\nRequirement already satisfied: pillow>=6.2.0 in /opt/conda/lib/python3.7/site-packages (from matplotlib->biosppy) (8.2.0)\nRequirement already satisfied: threadpoolctl>=2.0.0 in /opt/conda/lib/python3.7/site-packages (from scikit-learn->biosppy) (2.1.0)\nBuilding wheels for collected packages: biosppy\n  Building wheel for biosppy (setup.py) ... \u001b[?25ldone\n\u001b[?25h  Created wheel for biosppy: filename=biosppy-0.7.3-py2.py3-none-any.whl size=95409 sha256=dff729516c6530dccc46d413f715fe091bbf04d74fa903a474dab25816eff212\n  Stored in directory: /root/.cache/pip/wheels/2f/4f/8f/28b2adc462d7e37245507324f4817ce1c64ef2464f099f4f0b\nSuccessfully built biosppy\nInstalling collected packages: bidict, biosppy\nSuccessfully installed bidict-0.21.2 biosppy-0.7.3\n\u001b[33mWARNING: Running pip as root will break packages and permissions. You should install packages reliably by using venv: https://pip.pypa.io/warnings/venv\u001b[0m\n","output_type":"stream"}]},{"cell_type":"code","source":"import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport tensorflow as tf\nimport wfdb\nimport os                                                                                                         \nimport gc\nimport scipy       \nimport sklearn\nfrom pathlib import Path\nfrom sklearn.utils import shuffle\nfrom sklearn.manifold import TSNE\nimport seaborn as sns\nfrom sklearn import preprocessing\nimport shutil\nimport math\nimport random\nfrom scipy.spatial import distance\nfrom biosppy.signals import ecg\nfrom scipy.interpolate import PchipInterpolator","metadata":{"execution":{"iopub.status.busy":"2021-07-29T10:23:24.847014Z","iopub.execute_input":"2021-07-29T10:23:24.847305Z","iopub.status.idle":"2021-07-29T10:23:32.657692Z","shell.execute_reply.started":"2021-07-29T10:23:24.847273Z","shell.execute_reply":"2021-07-29T10:23:32.656638Z"},"trusted":true},"execution_count":2,"outputs":[]},{"cell_type":"code","source":"try:\n    tpu = tf.distribute.cluster_resolver.TPUClusterResolver()  # TPU detection\n    print('Running on TPU ', tpu.cluster_spec().as_dict()['worker'])\nexcept ValueError:\n    raise BaseException('ERROR: Not connected to a TPU runtime; please see the previous cell in this notebook for instructions!')\n\ntf.config.experimental_connect_to_cluster(tpu)\ntf.tpu.experimental.initialize_tpu_system(tpu)\ntpu_strategy = tf.distribute.experimental.TPUStrategy(tpu)","metadata":{"execution":{"iopub.status.busy":"2021-07-29T10:23:32.659624Z","iopub.execute_input":"2021-07-29T10:23:32.659940Z","iopub.status.idle":"2021-07-29T10:23:38.761185Z","shell.execute_reply.started":"2021-07-29T10:23:32.659909Z","shell.execute_reply":"2021-07-29T10:23:38.760380Z"},"trusted":true},"execution_count":3,"outputs":[{"name":"stdout","text":"Running on TPU  ['10.0.0.2:8470']\n","output_type":"stream"}]},{"cell_type":"markdown","source":"# Dataset Creation","metadata":{}},{"cell_type":"code","source":"####### Dataset Creation \n\n###### Constants\nFS = 500\nW_LEN = 256\nW_LEN_1_4 = 256 // 4\nW_LEN_3_4 = 3 * (256 // 4)\n\n###### Function to Read a Record\ndef read_rec(rec_path):\n\n    \"\"\" \n    Function to read record and return Segmented Signals\n\n    INPUTS:-\n    1) rec_path : Path of the Record\n\n    OUTPUTS:-\n    1) seg_sigs : Final Segmented Signals\n\n    \"\"\"\n    number_of_peaks = 2 # For extracting the required number of peaks                                    \n    full_rec = (wfdb.rdrecord(rec_path)).p_signal[:,1] # Entire Record\n\n    f = PchipInterpolator(np.arange(10000),full_rec) # Fitting Interpolation Function\n    x_samp = (np.arange(10000)*(500/360))[:7200] # Fixing Interpolation Input Values\n    full_rec_interp = f(x_samp)  # Intepolating Values \n    r_peaks_init = ecg.hamilton_segmenter(full_rec_interp,360)[0] # R-Peak Segmentation and input is the signal frequency of 500Hz in this case\n    final_peak_index = r_peaks_init[int(r_peaks_init.shape[0] - int((r_peaks_init.shape[0]%number_of_peaks)))-1]\n    r_peaks_final = r_peaks_init[:final_peak_index] # Final Number of R_Peaks\n    full_rec_final = full_rec_interp[:int(r_peaks_final[-1]+W_LEN)] # Final Sequence\n    seg_sigs, r_peaks_ref = segmentSignals(full_rec_final,list(r_peaks_final)) # Final Signal Segmentation\n\n    return seg_sigs # Returning the Ouput of the Signal Segmentation\n\n###### Function to Segment Signals\n\n##### Function\ndef segmentSignals(signal, r_peaks_annot, normalization=True, person_id= None, file_id=None):\n    \n    \"\"\"\n    Segments signals based on the detected R-Peak\n    Args:\n        signal (numpy array): input signal\n        r_peaks_annot (int []): r-peak locations.\n        normalization (bool, optional): apply z-normalization or not? . Defaults to True.\n        person_id ([type], optional): [description]. Defaults to None.\n        file_id ([type], optional): [description]. Defaults to None.\n    Returns:\n            [tuple(numpy array,numpy array)]: segmented signals and refined r-peaks\n    \"\"\"\n    def refine_rpeaks(signal, r_peaks):\n        \"\"\"\n        Refines the detected R-peaks. If the R-peak is slightly shifted, this assigns the \n        highest point R-peak.\n        Args:\n            signal (numpy array): input signal\n            r_peaks (int []): list of detected r-peaks\n        Returns:\n            [numpy array]: refined r-peaks\n        \"\"\"\n        r_peaks2 = np.array(r_peaks)            # make a copy\n        for i in range(len(r_peaks)):\n            r = r_peaks[i]          # current R-peak\n            small_segment = signal[max(0,r-100):min(len(signal),r+100)]         # consider the neighboring segment of R-peak\n            r_peaks2[i] = np.argmax(small_segment) - 100 + r_peaks[i]           # picking the highest point\n            r_peaks2[i] = min(r_peaks2[i],len(signal))                          # the detected R-peak shouldn't be outside the signal\n            r_peaks2[i] = max(r_peaks2[i],0)                                    # checking if it goes before zero    \n        return r_peaks2                     # returning the refined r-peak list\n    \n    segmented_signals = []                      # array containing the segmented beats\n    \n    r_peaks = np.array(r_peaks_annot)\n\n    r_peaks = refine_rpeaks(signal, r_peaks)\n    skip_len = 2 # Parameter to specify number of r_peaks in one signal\n    max_seq_len = 512 # Parameter to specify maximum sequence length\n    \n    for r_curr in range(0,int(r_peaks.shape[0]-(skip_len-1)),skip_len):\n        if ((r_peaks[r_curr]-W_LEN_1_4)<0) or ((r_peaks[r_curr+(skip_len-1)]+W_LEN_3_4)>=len(signal)):           # not enough signal to segment\n            continue\n        segmented_signal = np.array(signal[r_peaks[r_curr]-W_LEN_1_4:r_peaks[r_curr+(skip_len-1)]+W_LEN_3_4])        # segmenting a heartbeat\n        segmented_signal = list(segmented_signal)\n        #print(segmented_signal.shape)\n        \n        if(len(segmented_signal) < 512):\n            for m in range(int(512-len(segmented_signal))): # Zero Padding\n                segmented_signal.append(0)\n        else:\n            segmented_signal = (segmented_signal[:int(max_seq_len)])\n            \n        segmented_signal = np.array(segmented_signal)\n        \n        if(segmented_signal.shape != (512,1)):    \n            segmented_signal = np.reshape(segmented_signal,(512,1))\n            \n        if (normalization):             # Z-score normalization\n            if abs(np.std(segmented_signal))<1e-6:          # flat line ECG, will cause zero division error\n                continue\n            segmented_signal = (segmented_signal - np.mean(segmented_signal)) / np.std(segmented_signal)            \n              \n        #if not np.isnan(segmented_signal).any():                    # checking for nan, this will never happen\n            segmented_signals.append(segmented_signal)\n\n    return segmented_signals,r_peaks           # returning the segmented signals and the refined r-peaks","metadata":{"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"###### Extracting List of the Elements with Two Sessions \ndir = '../input/ecg1d/ecg-id-database-1.0.0'\ntotal_index = 0\nsubjects_with_two = []\n\nfor item in np.sort(os.listdir(dir)):\n    #print('----------------------------------')\n    #print(item)\n    dir_sub = os.path.join(dir,item)\n    if(os.path.isdir(dir_sub)):\n        #print(len(os.listdir(dir_sub))//3)\n        if(len(os.listdir(dir_sub))//3 == 2):\n            total_index = total_index+1   \n            #print(item)\n            subjects_with_two.append(item)\n    #print('----------------------------------')\n\nprint(total_index)\nprint(subjects_with_two)","metadata":{"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"###### Numpy Array Creation\npath_to_dir = '../input/ecg1d/ecg-id-database-1.0.0'\ntotal_folders = 90\ncurrent_index = 0\n\nX_train = []\nX_dev = []\ny_train = []\ny_dev = []\n\n#for item in subjects_with_two:\nfor i in range(2,92):\n\n    if(i != 75):\n\n        print(i-1)\n        folder_path = os.path.join(path_to_dir,np.sort(os.listdir(path_to_dir))[i]) # Path Selection\n        #items_in_folder = int(len(folder_path)//3)\n        #current_storage_path = './5_Beat_Ecg_ECG1D'+'/person'+str(current_index)\n\n        #for j in os.listdir(item):\n\n        for j in range(2):\n\n            rec_path = folder_path+'/'+'rec'+'_'+str(j+1) # Path to Record\n            seg_signal_current = read_rec(rec_path)\n\n            if(j == 0):\n                for k in range(len(seg_signal_current)):\n                    #file_name_current = current_storage_path+'/'+str(j)+'_/'+str(k)\n                    #np.savez_compressed(file_name_current,seg_signal_current[k])\n                    X_train.append(seg_signal_current[k])\n                    y_train.append(current_index)\n\n            else:\n                for k in range(len(seg_signal_current)):\n                    #file_name_current = current_storage_path+'/'+str(j)+'_/'+str(k)\n                    #np.savez_compressed(file_name_current,seg_signal_current[k])\n                    X_dev.append(seg_signal_current[k])\n                    y_dev.append(current_index)\n\n        current_index = current_index+1\n\n###### Shuffling Numpy Arrays\nX_train,y_train = shuffle(X_train,y_train)\nX_dev,y_dev = shuffle(X_dev,y_dev)\n\n###### Saving Numpy Arrays\nnp.savez_compressed('X_train_ECG1D.npz',np.array(X_train))\nnp.savez_compressed('y_train_ECG1D.npz',np.array(y_train))\nnp.savez_compressed('X_dev_ECG1D.npz',np.array(X_dev))\nnp.savez_compressed('y_dev_ECG1D.npz',np.array(y_dev))  ","metadata":{"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"##### Loading Dataset - ECG-1D - Train\n#X_train = np.array(np.load('../input/ecg1d-osv-dataset/X_train_ECG1D_train_OSV.npz',allow_pickle=True)['arr_0'],dtype=np.float16)\n#X_dev = np.array(np.load('../input/ecg1d-osv-dataset/X_dev_ECG1D_train_OSV.npz',allow_pickle=True)['arr_0'],dtype=np.float16)\n#y_train = np.load('../input/ecg1d-osv-dataset/y_train_ECG1D_train_OSV.npz',allow_pickle=True)['arr_0']\n#y_dev = np.load('../input/ecg1d-osv-dataset/y_dev_ECG1D_train_OSV.npz',allow_pickle=True)['arr_0']\n\n##### Loading Dataset - ECG-1D - Test\nX_train = np.array(np.load('../input/ecg1d-osv-dataset/X_train_ECG1D_test_OSV.npz',allow_pickle=True)['arr_0'],dtype=np.float16)\nX_dev = np.array(np.load('../input/ecg1d-osv-dataset/X_dev_ECG1D_test_OSV.npz',allow_pickle=True)['arr_0'],dtype=np.float16)\ny_train = np.load('../input/ecg1d-osv-dataset/y_train_ECG1D_test_OSV.npz',allow_pickle=True)['arr_0']\ny_dev = np.load('../input/ecg1d-osv-dataset/y_dev_ECG1D_test_OSV.npz',allow_pickle=True)['arr_0']\n\n##### Loading Dataset - PTB\n#X_train = np.array(np.load('../input/ptb-290/X_train_PTB_290.npz',allow_pickle=True)['arr_0'],dtype=np.float16)\n#X_dev = np.array(np.load('../input/ptb-290/X_dev_PTB_290.npz',allow_pickle=True)['arr_0'],dtype=np.float16)\n#y_train = np.load('../input/ptb-290/y_train_PTB_290.npz',allow_pickle=True)['arr_0']\n#y_dev = np.load('../input/ptb-290/y_dev_PTB_290.npz',allow_pickle=True)['arr_0']\n\n##### Loading Dataset - MITBIH\n#X_train = np.array(np.load('../input/mitbih/X_train_Identification_MITBIH.npz',allow_pickle=True)['arr_0'],dtype=np.float16)\n#X_dev = np.array(np.load('../input/mitbih/X_dev_Identification_MITBIH.npz',allow_pickle=True)['arr_0'],dtype=np.float16)\n#y_train = np.load('../input/mitbih/y_train_Identification_MITBIH.npz',allow_pickle=True)['arr_0']\n#y_dev = np.load('../input/mitbih/y_dev_Identification_MITBIH.npz',allow_pickle=True)['arr_0']","metadata":{"execution":{"iopub.status.busy":"2021-07-29T11:25:32.451889Z","iopub.execute_input":"2021-07-29T11:25:32.452335Z","iopub.status.idle":"2021-07-29T11:25:32.493280Z","shell.execute_reply.started":"2021-07-29T11:25:32.452301Z","shell.execute_reply":"2021-07-29T11:25:32.492025Z"},"trusted":true},"execution_count":19,"outputs":[]},{"cell_type":"code","source":"##### Converting Labels to Categorical Format\ny_train_ohot = tf.keras.utils.to_categorical(y_train)\ny_dev_ohot = tf.keras.utils.to_categorical(y_dev)","metadata":{"execution":{"iopub.status.busy":"2021-07-29T11:25:34.790624Z","iopub.execute_input":"2021-07-29T11:25:34.791098Z","iopub.status.idle":"2021-07-29T11:25:34.797671Z","shell.execute_reply.started":"2021-07-29T11:25:34.791060Z","shell.execute_reply":"2021-07-29T11:25:34.796294Z"},"trusted":true},"execution_count":20,"outputs":[]},{"cell_type":"markdown","source":"# Model Making","metadata":{}},{"cell_type":"markdown","source":"## Self-Calibrated Convolution","metadata":{}},{"cell_type":"code","source":"###### Model Development : Self-Calibrated \n\n##### Defining Self-Calibrated Block\n\nrate_regularizer = 1e-5\nclass self_cal_Conv1D(tf.keras.layers.Layer):\n\n    \"\"\" \n    This is inherited class from keras.layers and shall be instatition of self-calibrated convolutions\n    \"\"\"\n    \n    def __init__(self,num_filters,kernel_size,num_features):\n    \n        #### Defining Essentials\n        super().__init__()\n        self.num_filters = num_filters\n        self.kernel_size = kernel_size\n        self.num_features = num_features # Number of Channels in Input\n\n        #### Defining Layers\n        self.conv2 = tf.keras.layers.Conv1D(self.num_features/2,self.kernel_size,padding='same',kernel_regularizer=tf.keras.regularizers.l2(rate_regularizer),dtype='float32',activation='relu')\n        self.conv3 = tf.keras.layers.Conv1D(self.num_features/2,self.kernel_size,padding='same',kernel_regularizer=tf.keras.regularizers.l2(rate_regularizer),dtype='float32',activation='relu')\n        self.conv4 = tf.keras.layers.Conv1D(self.num_filters/2,self.kernel_size,padding='same',activation='relu',kernel_regularizer=tf.keras.regularizers.l2(rate_regularizer),dtype='float32')\n        self.conv1 = tf.keras.layers.Conv1D(self.num_filters/2,self.kernel_size,padding='same',activation='relu',kernel_regularizer=tf.keras.regularizers.l2(rate_regularizer),dtype='float32')\n        self.upsample = tf.keras.layers.Conv1DTranspose(filters=int(self.num_features/2),kernel_size=4,strides=4)\n        #self.attention_layer = tf.keras.layers.Attention()\n        #self.lstm = tf.keras.layers.LSTM(int(self.num_features/2),return_sequences=True)\n        #self.layernorm = tf.keras.layers.LayerNormalization()\n    \n    def get_config(self):\n\n        config = super().get_config().copy()\n        config.update({\n            'num_filters': self.num_filters,\n            'kernel_size': self.kernel_size,\n            'num_features': self.num_features\n        })\n        return config\n    \n    \n    def call(self,X):\n       \n        \"\"\"\n          INPUTS : 1) X - Input Tensor of shape (batch_size,sequence_length,num_features)\n          OUTPUTS : 1) X - Output Tensor of shape (batch_size,sequence_length,num_features)\n        \"\"\"\n        \n        #### Dimension Extraction\n        b_s = (X.shape)[0] \n        seq_len = (X.shape)[1]\n        num_features = (X.shape)[2]\n        \n        #### Channel-Wise Division\n        X_attention = X[:,:,0:int(self.num_features/2)]\n        X_global = X[:,:,int(self.num_features/2):]\n        \n        #### Self Calibration Block\n\n        ### Local Feature Detection\n\n        ## Down-Sampling\n        #x1 = X_attention[:,0:int(seq_len/5),:]\n        #x2 = X_attention[:,int(seq_len/5):int(seq_len*(2/5)),:]\n        #x3 = X_attention[:,int(seq_len*(2/5)):int(seq_len*(3/5)),:]\n        #x4 = X_attention[:,int(seq_len*(3/5)):int(seq_len*(4/5)),:]\n        #x5 = X_attention[:,int(seq_len*(4/5)):seq_len,:]\n        x_down_sampled = tf.keras.layers.AveragePooling1D(pool_size=4,strides=4)(X_attention)\n        \n        ## Convoluting Down Sampled Sequence \n        #x1 = self.conv2(x1)\n        #x2 = self.conv2(x2)\n        #x3 = self.conv2(x3)\n        #x4 = self.conv2(x4)\n        #x5 = self.conv2(x5)\n        x_down_conv = self.conv2(x_down_sampled)\n        #x_down_feature = self.attention_layer([x_down_sampled,x_down_sampled])\n        #x_down_feature = self.lstm(x_down_sampled)\n        #x_down_feature = self.layernorm(x_down_feature)\n        \n        ## Up-Sampling\n        x_down_upsampled = self.upsample(x_down_conv)   \n        #X_local_upsampled = tf.keras.layers.concatenate([x1,x2,x3,x4,x5],axis=1)\n\n        ## Local-CAM\n        X_local = X_attention + x_down_upsampled  #X_local_upsampled\n\n        ## Local Importance \n        X_2 = tf.keras.activations.sigmoid(X_local)\n\n        ### Self-Calibration\n\n        ## Global Convolution\n        X_3 = self.conv3(X_attention)\n\n        ## Attention Determination\n        X_attention = tf.math.multiply(X_2,X_3)\n\n        #### Self-Calibration Feature Extraction\n        X_4 = self.conv4(X_attention)\n\n        #### Normal Feature Extraction\n        X_1 = self.conv1(X_global)\n\n        #### Concatenating and Returning Output\n        return (tf.keras.layers.concatenate([X_1,X_4],axis=2))","metadata":{"execution":{"iopub.status.busy":"2021-07-29T10:24:41.477247Z","iopub.execute_input":"2021-07-29T10:24:41.477856Z","iopub.status.idle":"2021-07-29T10:24:41.504124Z","shell.execute_reply.started":"2021-07-29T10:24:41.477657Z","shell.execute_reply":"2021-07-29T10:24:41.503292Z"},"trusted":true},"execution_count":6,"outputs":[]},{"cell_type":"markdown","source":"## Transformer","metadata":{}},{"cell_type":"code","source":"def get_angles(pos, i, d_model):\n    angle_rates = 1 / np.power(10000, (2 * (i//2)) / np.float32(d_model))\n    return pos * angle_rates\n\ndef positional_encoding(position, d_model):\n    angle_rads = get_angles(np.arange(position)[:, np.newaxis],\n                          np.arange(d_model)[np.newaxis, :],\n                          d_model)\n  \n  # apply sin to even indices in the array; 2i\n    angle_rads[:, 0::2] = np.sin(angle_rads[:, 0::2])\n  \n  # apply cos to odd indices in the array; 2i+1\n    angle_rads[:, 1::2] = np.cos(angle_rads[:, 1::2])\n    \n    pos_encoding = angle_rads[np.newaxis, ...]\n    \n    return tf.cast(pos_encoding, dtype=tf.float32)\n\ndef create_padding_mask(seq):\n    seq = tf.cast(tf.math.equal(seq, 0), tf.float32)\n  \n    # add extra dimensions to add the padding\n    # to the attention logits. \n    return seq[:, tf.newaxis, tf.newaxis, :]  # (batch_size, 1, 1, seq_len)\n\ndef scaled_dot_product_attention(q, k, v, mask):\n    \"\"\"Calculate the attention weights.\n    q, k, v must have matching leading dimensions.\n    k, v must have matching penultimate dimension, i.e.: seq_len_k = seq_len_v.\n    The mask has different shapes depending on its type(padding or look ahead) \n    but it must be broadcastable for addition.\n\n    Args:\n    q: query shape == (..., seq_len_q, depth)\n    k: key shape == (..., seq_len_k, depth)\n    v: value shape == (..., seq_len_v, depth_v)\n    mask: Float tensor with shape broadcastable \n          to (..., seq_len_q, seq_len_k). Defaults to None.\n\n    Returns:\n    output, attention_weights\n    \"\"\"\n\n    matmul_qk = tf.matmul(q, k, transpose_b=True)  # (..., seq_len_q, seq_len_k)\n  \n    # scale matmul_qk\n    dk = tf.cast(tf.shape(k)[-1], tf.float32)\n    scaled_attention_logits = matmul_qk / tf.math.sqrt(dk)\n\n    # add the mask to the scaled tensor.\n    if mask is not None:\n        scaled_attention_logits += (mask * -1e9)  \n\n    # softmax is normalized on the last axis (seq_len_k) so that the scores\n    # add up to 1.\n    attention_weights = tf.nn.softmax(scaled_attention_logits, axis=-1)  # (..., seq_len_q, seq_len_k)\n\n    output = tf.matmul(attention_weights, v)  # (..., seq_len_q, depth_v)\n\n    return output, attention_weights\n\nclass MultiHeadAttention(tf.keras.layers.Layer):\n    def __init__(self, d_model, num_heads):\n        super(MultiHeadAttention, self).__init__()\n        self.num_heads = num_heads\n        self.d_model = d_model\n\n        assert d_model % self.num_heads == 0\n\n        self.depth = d_model // self.num_heads\n\n        self.wq = tf.keras.layers.Dense(d_model)\n        self.wk = tf.keras.layers.Dense(d_model)\n        self.wv = tf.keras.layers.Dense(d_model)\n\n        self.dense = tf.keras.layers.Dense(d_model)\n\n    def get_config(self):\n        config = super(MultiHeadAttention, self).get_config().copy()\n        config.update({\n            'd_model': self.d_model,\n            'num_heads':self.num_heads\n        })\n        \n    def split_heads(self, x, batch_size):\n        \n        \"\"\"Split the last dimension into (num_heads, depth).\n        Transpose the result such that the shape is (batch_size, num_heads, seq_len, depth)\n        \"\"\"\n        x = tf.reshape(x, (batch_size, -1, self.num_heads, self.depth))\n        return tf.transpose(x, perm=[0, 2, 1, 3])\n    \n    def call(self, v, k, q, mask):\n        batch_size = tf.shape(q)[0]\n\n        q = self.wq(q)  # (batch_size, seq_len, d_model)\n        k = self.wk(k)  # (batch_size, seq_len, d_model)\n        v = self.wv(v)  # (batch_size, seq_len, d_model)\n\n        q = self.split_heads(q, batch_size)  # (batch_size, num_heads, seq_len_q, depth)\n        k = self.split_heads(k, batch_size)  # (batch_size, num_heads, seq_len_k, depth)\n        v = self.split_heads(v, batch_size)  # (batch_size, num_heads, seq_len_v, depth)\n\n        # scaled_attention.shape == (batch_size, num_heads, seq_len_q, depth)\n        # attention_weights.shape == (batch_size, num_heads, seq_len_q, seq_len_k)\n        scaled_attention, attention_weights = scaled_dot_product_attention(\n            q, k, v, mask)\n\n        scaled_attention = tf.transpose(scaled_attention, perm=[0, 2, 1, 3])  # (batch_size, seq_len_q, num_heads, depth)\n\n        concat_attention = tf.reshape(scaled_attention, \n                                      (batch_size, -1, self.d_model))  # (batch_size, seq_len_q, d_model)\n\n        output = self.dense(concat_attention)  # (batch_size, seq_len_q, d_model)\n\n        return output, attention_weights\n\ndef point_wise_feed_forward_network(d_model, dff):\n    return tf.keras.Sequential([\n      tf.keras.layers.Dense(dff, activation='relu'),  # (batch_size, seq_len, dff)\n      tf.keras.layers.Dense(d_model)  # (batch_size, seq_len, d_model)\n  ])\n\nclass Encoder(tf.keras.layers.Layer):\n    def __init__(self, num_layers, d_model, num_heads, dff,\n               maximum_position_encoding, rate=0.1):\n        super(Encoder, self).__init__()\n\n        self.d_model = d_model\n        self.num_layers = num_layers\n        self.num_heads = num_heads\n        self.dff = dff\n        self.maximum_position_encoding = maximum_position_encoding\n        self.rate = rate\n\n        #self.embedding = tf.keras.layers.Embedding(input_vocab_size, d_model)\n        self.pos_encoding = positional_encoding(maximum_position_encoding, \n                                                self.d_model)\n\n\n        self.enc_layers = [EncoderLayer(d_model, num_heads, dff, rate) \n                           for _ in range(num_layers)]\n\n        self.dropout = tf.keras.layers.Dropout(rate)\n        \n    def get_config(self):\n        config = super(Encoder, self).get_config().copy()\n        config.update({\n            'num_layers': self.num_layers,\n            'd_model': self.d_model,\n            'num_heads':self.num_heads,\n            'dff':self.dff,\n            'maximum_position_encoding':self.maximum_position_encoding,\n            'rate':self.rate  \n        })\n        \n    def call(self, x, training, mask):\n\n        seq_len = tf.shape(x)[1]\n\n        # adding embedding and position encoding.\n        #x = self.embedding(x)  # (batch_size, input_seq_len, d_model)\n        x *= tf.math.sqrt(tf.cast(self.d_model, tf.float32))\n        x += self.pos_encoding[:, :seq_len, :]\n\n        x = self.dropout(x, training=training)         \n\n        for i in range(self.num_layers):\n            x = self.enc_layers[i](x, training, mask)\n\n        return x  # (batch_size, input_seq_len, d_model)\n\nclass EncoderLayer(tf.keras.layers.Layer):\n    def __init__(self, d_model, num_heads, dff, rate=0.1):\n        super(EncoderLayer, self).__init__()\n        \n        self.d_model = d_model\n        self.num_heads = num_heads\n        self.dff = dff\n        self.rate = rate\n\n        self.mha = MultiHeadAttention(d_model, num_heads)\n        self.ffn = point_wise_feed_forward_network(d_model, dff)\n\n        self.layernorm1 = tf.keras.layers.LayerNormalization(epsilon=1e-6)\n        self.layernorm2 = tf.keras.layers.LayerNormalization(epsilon=1e-6)\n\n        self.dropout1 = tf.keras.layers.Dropout(rate)\n        self.dropout2 = tf.keras.layers.Dropout(rate)\n        \n    def get_config(self):\n        config = super(EncoderLayer, self).get_config().copy()\n        config.update({\n            'd_model': self.d_model,\n            'num_heads':self.num_heads,\n            'dff':self.dff,\n            'rate':self.rate  \n        })\n\n    def call(self, x, training, mask):\n\n        attn_output, _ = self.mha(x, x, x, mask)  # (batch_size, input_seq_len, d_model)\n        attn_output = self.dropout1(attn_output, training=training)\n        out1 = self.layernorm1(x + attn_output)  # (batch_size, input_seq_len, d_model)\n\n        ffn_output = self.ffn(out1)  # (batch_size, input_seq_len, d_model)\n        ffn_output = self.dropout2(ffn_output, training=training)\n        out2 = self.layernorm2(out1 + ffn_output)  # (batch_size, input_seq_len, d_model)\n    \n        return out2\n    \nclass Transformer(tf.keras.Model):\n    def __init__(self, num_layers, d_model, num_heads, dff, \n                 pe_input, rate=0.1):\n        super(Transformer, self).__init__()\n        \n        self.num_layers = num_layers\n        self.d_model = d_model\n        self.num_heads = num_heads\n        self.dff = dff\n        self.pe_input = pe_input\n        self.rate = rate\n        \n        self.encoder = Encoder(num_layers, d_model, num_heads, dff, \n                                pe_input, rate)\n        \n    def get_config(self):\n        config = super(Transformer,self).get_config().copy()\n        config.update({\n            'num_layers': self.num_layers,\n            'd_model': self.d_model,\n            'num_heads':self.num_heads,\n            'dff':self.dff,\n            'pe_input':self.pe_input,\n            'rate':self.rate  \n        })\n    \n    def call(self, inp, training, enc_padding_mask):\n        return self.encoder(inp, training, enc_padding_mask)","metadata":{"execution":{"iopub.status.busy":"2021-07-29T10:24:41.506332Z","iopub.execute_input":"2021-07-29T10:24:41.506813Z","iopub.status.idle":"2021-07-29T10:24:41.555678Z","shell.execute_reply.started":"2021-07-29T10:24:41.506765Z","shell.execute_reply":"2021-07-29T10:24:41.554472Z"},"trusted":true},"execution_count":7,"outputs":[]},{"cell_type":"markdown","source":"## ArcFace Loss","metadata":{}},{"cell_type":"code","source":"###### ArcFace Layer\n\n##### Layer \nclass ArcFace(tf.keras.layers.Layer):\n\n    \"\"\" ArcFace Logits Generation Layer \"\"\"\n\n    def __init__(self,num_classes,s,m,input_embedding_dims):\n\n        #### Defining Essentials\n        super().__init__()\n        self.num_classes = num_classes # Number of Classes in the Outut\n        self.s = s # Geodesic Scaled Distance\n        self.m = m # Additive Angular Margin\n        self.input_embedding_dims = input_embedding_dims # Dimesnsion of Input to the ArcFace Layer\n\n        #### Defining Weight\n        self.W = self.add_weight(shape=(self.input_embedding_dims, self.num_classes),\n                                initializer='glorot_uniform',\n                                trainable=True,\n                                )\n\n    def get_config(self):\n\n        config = super().get_config().copy()\n        config.update({\n            'num_classes': self.num_classes,\n            's': self.s,\n            'm': self.m,\n            'input_embedding_dims': self.input_embedding_dims\n        })\n        return config\n\n    \n    def call(self,X):\n\n        #### Normalizing Layers\n        W = tf.nn.l2_normalize(self.W, axis=0)\n        X = tf.nn.l2_normalize(X, axis=1)\n\n        #### Logits Computation\n        logits = X @ W\n        #z = tf.linalg.matmul(X,W) #Logits\n\n        #### Additive Angular Margin \n        #theta = tf.acos(tf.keras.backend.clip(logits, -1.0 + tf.keras.backend.epsilon(), 1.0 - tf.keras.backend.epsilon())) #tf.math.acos(z)\n        #z = self.s*(tf.math.cos(theta+(self.m*y)))  # Final Logits\n\n        return logits  #tf.keras.layers.Softmax(axis=1)(z) # Returning Softmax Activated Result \n\n##### Loss \nclass ArcLoss(tf.keras.losses.Loss):\n\n    def __init__(self,num_classes,s,m):\n\n        #### Defining Essentials\n        super().__init__()\n        self.num_classes = num_classes # Number of Classes in the Outut\n        self.s = s # Geodesic Scaled Distance\n        self.m = m # Additive Angular Margin\n\n    def get_config(self):\n\n        config = super().get_config().copy()\n        config.update({\n            'num_classes': self.num_classes,\n            's': self.s,\n            'm': self.m\n        })\n        return config\n\n    def call(self,y_true,logits):\n        \n        theta = tf.acos(tf.keras.backend.clip(logits, -1.0 + tf.keras.backend.epsilon(), 1.0 - tf.keras.backend.epsilon()))\n        target_logits = tf.cos(theta + self.m)\n        # sin = tf.sqrt(1 - logits**2)\n        # cos_m = tf.cos(logits)\n        # sin_m = tf.sin(logits)\n        # target_logits = logits * cos_m - sin * sin_m\n        #\n        logits = logits * (1 - y_true) + target_logits * y_true\n        # feature re-scale\n        logits *= self.s\n        out = tf.nn.softmax(logits)    \n        \n        return tf.keras.losses.categorical_crossentropy(y_true,out)\n\n        #z = self.s*(tf.math.cos(theta+(self.m*y_true)))\n        #final_logits = tf.keras.layers.Softmax(axis=1)(z) # Softmax Operated Final Logits\n        #return ","metadata":{"execution":{"iopub.status.busy":"2021-07-29T10:24:41.557111Z","iopub.execute_input":"2021-07-29T10:24:41.557665Z","iopub.status.idle":"2021-07-29T10:24:41.576921Z","shell.execute_reply.started":"2021-07-29T10:24:41.557622Z","shell.execute_reply":"2021-07-29T10:24:41.575841Z"},"trusted":true},"execution_count":8,"outputs":[]},{"cell_type":"markdown","source":"# Model Training","metadata":{}},{"cell_type":"code","source":"####### Phase-1 Models\n###### Defining Architecture\n\nwith tpu_strategy.scope():\n\n    ##### SC_Module \n\n    #### Defining Hyperparameters\n    num_layers = 2\n    d_model = 512\n    num_heads = 8\n    dff = 1024\n    max_seq_len = 512 #X_train.shape[1]\n    pe_input = 320\n    rate = 0.5\n    num_features = 1\n    num_classes = 72\n\n    #### Defining Layers\n    Input_layer = tf.keras.layers.Input(shape=(max_seq_len,num_features))\n    self_conv1 = self_cal_Conv1D(128,15,128)\n    self_conv2 = self_cal_Conv1D(128,20,128) # Newly Added\n    self_conv3 = self_cal_Conv1D(256,15,128)\n    self_conv4 = self_cal_Conv1D(256,20,256) # Newly Added\n    self_conv5 = self_cal_Conv1D(512,15,256)\n    self_conv6 = self_cal_Conv1D(512,20,512) # Newly Added\n    self_conv7 = self_cal_Conv1D(1024,3,512)\n    self_conv8 = self_cal_Conv1D(1024,5,1024) # Newly Added\n    conv_initial = tf.keras.layers.Conv1D(32,15,padding='same',activation='relu')\n    conv_second = tf.keras.layers.Conv1D(64,15,padding='same',activation='relu')\n    conv_third = tf.keras.layers.Conv1D(128,15,padding='same',activation='relu')\n    #lstm1 = tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(128,activation='tanh',return_sequences=True),merge_mode='ave')\n    transform_1 = tf.keras.layers.Conv1D(128,3,padding='same',kernel_initializer='lecun_normal', activation='selu')\n    transform_2 = tf.keras.layers.Conv1D(256,3,padding='same',kernel_initializer='lecun_normal', activation='selu')\n    transform_3 = tf.keras.layers.Conv1D(512,3,padding='same',kernel_initializer='lecun_normal', activation='selu')\n    transform_4 = tf.keras.layers.Conv1D(1024,3,padding='same',kernel_initializer='lecun_normal', activation='selu')\n    transformer = Transformer(num_layers,d_model,num_heads,dff,pe_input,rate)\n    gap_layer = tf.keras.layers.GlobalAveragePooling1D()\n    ArcFace_Layer = ArcFace(72,30.0,0.3,256)\n    ArcFace_Loss = ArcLoss(72,30.0,0.3)\n    #arc_logit_layer = ArcFace(89,30.0,0.3,tf.keras.regularizers.l2(1e-4))\n\n    #### Defining Architecture\n    ### Input Layer\n    Inputs = Input_layer\n    #Input_Labels = tf.keras.layers.Input(shape=(num_classes,))\n\n    ### Initial Convolutional Layers\n    conv_initial = conv_initial(Inputs)\n    #conv_initial = tf.keras.layers.LayerNormalization()(conv_initial)\n    #conv_initial = tf.keras.layers.MaxPool1D(pool_size=2,strides=2)(conv_initial)     \n    #conv_initial = tf.keras.layers.Add()([conv_initial,Inputs])\n    \n    conv_second = conv_second(conv_initial)\n    #conv_second = tf.keras.layers.LayerNormalization()(conv_second)\n    #conv_second = tf.keras.layers.MaxPool1D(pool_size=2,strides=2)(conv_second)\n    #conv_second = tf.keras.layers.Add()([conv_second,conv_initial])\n    #conv_second = tf.keras.layers.concatenate(axis=2)([conv_initial,conv_second])\n    \n    conv_third = conv_third(conv_second)\n    #conv_third = tf.keras.layers.LayerNormalization()(conv_third)\n    #conv_third = tf.keras.layers.MaxPool1D(pool_size=2,strides=2)(conv_third)\n    #mask = tf.keras.layers.MaxPool1D(pool_size=2,strides=2)(Inputs)\n    #conv_third = tf.keras.layers.Add()([conv_third,conv_second])\n    #conv_third = tf.keras.layers.concatenate(axis=2)([conv_initial,conv_second,conv_third])\n    #conv_third = lstm1(conv_second)\n    #conv_third = tf.keras.layers.Attention()([conv_third,conv_third])\n    \n    ### 1st Residual Block\n    transform_1 = transform_1(conv_third)\n    conv1 = self_conv1(conv_third)\n    #conv1 = tf.keras.layers.AlphaDropout(rate=0.2)(conv1)\n    conv2 = self_conv2(conv1)\n    #conv2 = tf.keras.layers.AlphaDropout(rate=0.2)(conv2)\n    conv2 = tf.keras.layers.Add()([conv2,transform_1])\n    #conv2 = tf.keras.layers.LayerNormalization()(conv2)\n    conv2 = tf.keras.layers.MaxPool1D(pool_size=2,strides=2)(conv2)\n    #mask = tf.keras.layers.MaxPool1D(pool_size=2,strides=2)(mask)    \n\n    ### 2nd Residual Block\n    #conv_third = tf.keras.layers.Attention()([conv_third,conv_third])\n    transform_2 = transform_2(conv2)\n    conv3 = self_conv3(conv2)\n    #conv3 = tf.keras.layers.AlphaDropout(rate=0.2)(conv3)\n    conv4 = self_conv4(conv3)\n    #conv4 = tf.keras.layers.AlphaDropout(rate=0.2)(conv4)\n    conv4 = tf.keras.layers.Add()([conv4,transform_2])\n    #conv4 = tf.keras.layers.LayerNormalization()(conv4)\n    conv4 = tf.keras.layers.MaxPool1D(pool_size=2,strides=2)(conv4)\n    #mask = tf.keras.layers.MaxPool1D(pool_size=2,strides=2)(mask)\n\n    ### 3rd Residual Block\n    transform_3 = transform_3(conv4)\n    conv5 = self_conv5(conv4)\n    #conv5 = tf.keras.layers.AlphaDropout(rate=0.2)(conv5)\n    conv6 = self_conv6(conv5)\n    #conv6 = tf.keras.layers.AlphaDropout(rate=0.2)(conv6)\n    conv6 = tf.keras.layers.Add()([conv6,transform_3])\n    #conv6 = tf.keras.layers.LayerNormalization()(conv6)\n    #conv6 = tf.keras.layers.MaxPool1D(pool_size=2,strides=2)(conv6)\n\n    ### 4th Residual Block\n    #transform_4 = transform_4(conv6)\n    #conv7 = self_conv7(conv6)\n    #conv8 = self_conv8(conv7)\n    #conv8 = tf.keras.layers.Add()([conv8,transform_4])\n\n    ### Transformer\n    ## Wide-Head Attention Model\n    #tx_embedding = tf.keras.layers.Lambda(PE_Layer)(Inputs)\n    #tx_embedding = tf.keras.layers.Dropout(rate)(tx_embedding,training=True)\n    #mask_reshaped = tf.keras.layers.Reshape((max_seq_len,))(Inputs)\n    #encoder_op1 = encoder_block1(tx_embedding,mask_reshaped)\n    #encoder_op2 = encoder_block2(encoder_op1,mask_reshaped)\n\n    ## Narrow-Head Attention Model\n    #mask_reshaped = tf.keras.layers.Reshape((160,))(mask)\n    embeddings =  transformer(inp=conv6,enc_padding_mask=None)\n    #embeddings = transformer(inp=conv6,enc_padding_mask=create_padding_mask(mask))\n    #residual_embeddings = tf.keras.layers.Add()([conv6,embeddings])\n\n    ### Output Layers\n    ## Initial Layers\n    gap_op = gap_layer(embeddings)\n    dense1 = tf.keras.layers.Dense(256,activation='relu')(gap_op)\n    dropout1 = tf.keras.layers.Dropout(rate)(dense1)\n    \n    ## ArcFace Output Network\n    dense2 = tf.keras.layers.Dense(256,kernel_initializer='he_normal',\n                kernel_regularizer=tf.keras.regularizers.l2(1e-4))(dropout1)\n    dense3 = ArcFace_Layer(dense2)\n    ##dense2 = tf.keras.layers.BatchNormalization()(dense2)\n    #dense3 = arc_logit_layer(([dense2,Input_Labels]))\n    \n    ## Softmax Output Network\n    #dense2 = tf.keras.layers.Dense(256,activation='relu')(dropout1)\n    ###dropout2 = tf.keras.layers.Dropout(rate)(dense2) # Not to be included\n    #dense3 = tf.keras.layers.Dense(35,activation='softmax')(dense2)\n\n    #### Compiling Architecture            \n    ### ArcFace Model Compilation\n    #model = tf.keras.models.Model(inputs=Inputs,outputs=dense3)\n    ### Softmax Model Compilation\n    #model = tf.keras.models.Model(inputs=Inputs,outputs=dense3)\n    \n    #model.load_weights('../input/ecg1d-models/Identification_ECG1D.h5')\n    model = tf.keras.models.Model(inputs=Inputs,outputs=dense3)\n    model.load_weights('./Incremental-MITBIH.h5')\n    model.compile(optimizer=tf.keras.optimizers.Adam(lr=1e-4,clipnorm=1.0),loss=ArcFace_Loss,metrics=['accuracy'])\n\nmodel.summary()      \ntf.keras.utils.plot_model(model)\n##### Model Training \n\n#### Model Checkpointing\nfilepath = './Incremental-MITBIH.h5'\ncheckpoint = tf.keras.callbacks.ModelCheckpoint(filepath,monitor='val_accuracy',save_best_only=True,mode='max',save_weights_only=True)\n\n#### Model Training\n#model.fit(X_train,y_train_ohot,epochs=250,batch_size=128,\n#          validation_data=(X_dev,y_dev_ohot),validation_batch_size=128,\n#         callbacks=checkpoint)\n\n##### Plotting Metrics  \n#### Accuracy and Loss Plots \n\n### Accuracy\n#plt.plot(history.history['accuracy'])\n#plt.plot(history.history['val_accuracy'])\n#plt.title('Model Accuracy')\n#plt.ylabel('Accuracy')\n#plt.xlabel('Epoch')  \n#plt.legend(['Train', 'Validation'], loc='best')\n#plt.show()\n\n### Loss     \n#plt.plot(history.history['loss'])  \n#plt.plot(history.history['val_loss'])\n#plt.title('Model Loss')  \n#plt.ylabel('Loss')         \n#plt.xlabel('epoch')\n#plt.legend(['Train', 'Validation'], loc='best')   \n#plt.show()","metadata":{"execution":{"iopub.status.busy":"2021-07-29T11:01:43.927386Z","iopub.execute_input":"2021-07-29T11:01:43.927816Z","iopub.status.idle":"2021-07-29T11:01:47.863418Z","shell.execute_reply.started":"2021-07-29T11:01:43.927776Z","shell.execute_reply":"2021-07-29T11:01:47.862329Z"},"trusted":true},"execution_count":15,"outputs":[{"name":"stdout","text":"Model: \"model_5\"\n__________________________________________________________________________________________________\nLayer (type)                    Output Shape         Param #     Connected to                     \n==================================================================================================\ninput_5 (InputLayer)            [(None, 512, 1)]     0                                            \n__________________________________________________________________________________________________\nconv1d_149 (Conv1D)             (None, 512, 32)      512         input_5[0][0]                    \n__________________________________________________________________________________________________\nconv1d_150 (Conv1D)             (None, 512, 64)      30784       conv1d_149[0][0]                 \n__________________________________________________________________________________________________\nconv1d_151 (Conv1D)             (None, 512, 128)     123008      conv1d_150[0][0]                 \n__________________________________________________________________________________________________\nself_cal__conv1d_24 (self_cal_C (None, 512, 128)     262464      conv1d_151[0][0]                 \n__________________________________________________________________________________________________\nself_cal__conv1d_25 (self_cal_C (None, 512, 128)     344384      self_cal__conv1d_24[0][0]        \n__________________________________________________________________________________________________\nconv1d_152 (Conv1D)             (None, 512, 128)     49280       conv1d_151[0][0]                 \n__________________________________________________________________________________________________\nadd_9 (Add)                     (None, 512, 128)     0           self_cal__conv1d_25[0][0]        \n                                                                 conv1d_152[0][0]                 \n__________________________________________________________________________________________________\nmax_pooling1d_6 (MaxPooling1D)  (None, 256, 128)     0           add_9[0][0]                      \n__________________________________________________________________________________________________\nself_cal__conv1d_26 (self_cal_C (None, 256, 256)     385472      max_pooling1d_6[0][0]            \n__________________________________________________________________________________________________\nself_cal__conv1d_27 (self_cal_C (None, 256, 256)     1376896     self_cal__conv1d_26[0][0]        \n__________________________________________________________________________________________________\nconv1d_153 (Conv1D)             (None, 256, 256)     98560       max_pooling1d_6[0][0]            \n__________________________________________________________________________________________________\nadd_10 (Add)                    (None, 256, 256)     0           self_cal__conv1d_27[0][0]        \n                                                                 conv1d_153[0][0]                 \n__________________________________________________________________________________________________\nmax_pooling1d_7 (MaxPooling1D)  (None, 128, 256)     0           add_10[0][0]                     \n__________________________________________________________________________________________________\nself_cal__conv1d_28 (self_cal_C (None, 128, 512)     1540992     max_pooling1d_7[0][0]            \n__________________________________________________________________________________________________\nself_cal__conv1d_29 (self_cal_C (None, 128, 512)     5506304     self_cal__conv1d_28[0][0]        \n__________________________________________________________________________________________________\nconv1d_154 (Conv1D)             (None, 128, 512)     393728      max_pooling1d_7[0][0]            \n__________________________________________________________________________________________________\nadd_11 (Add)                    (None, 128, 512)     0           self_cal__conv1d_29[0][0]        \n                                                                 conv1d_154[0][0]                 \n__________________________________________________________________________________________________\ntransformer_3 (Transformer)     (None, 128, 512)     4205568     add_11[0][0]                     \n__________________________________________________________________________________________________\nglobal_average_pooling1d_3 (Glo (None, 512)          0           transformer_3[0][0]              \n__________________________________________________________________________________________________\ndense_54 (Dense)                (None, 256)          131328      global_average_pooling1d_3[0][0] \n__________________________________________________________________________________________________\ndropout_23 (Dropout)            (None, 256)          0           dense_54[0][0]                   \n__________________________________________________________________________________________________\ndense_55 (Dense)                (None, 256)          65792       dropout_23[0][0]                 \n__________________________________________________________________________________________________\narc_face_4 (ArcFace)            (None, 72)           18432       dense_55[0][0]                   \n==================================================================================================\nTotal params: 14,533,504\nTrainable params: 14,533,504\nNon-trainable params: 0\n__________________________________________________________________________________________________\n","output_type":"stream"}]},{"cell_type":"markdown","source":"# Incremental Learning","metadata":{}},{"cell_type":"code","source":"###### Base Model \n\nwith tpu_strategy.scope():\n    predictive_model = tf.keras.models.Model(inputs=model.input,outputs=model.layers[-2].output)\n    predictive_model.compile(tf.keras.optimizers.Adam(lr=1e-4),loss='categorical_crossentropy',metrics=['accuracy'])\n\n###### Incremental Learning Model\n\nwith tpu_strategy.scope():\n    \n    ArcFace_Layer = ArcFace(17,30.0,0.3,256)\n    ArcFace_Loss = ArcLoss(17,30.0,0.3)\n    \n    Input_Layer = tf.keras.layers.Input((512,1))\n    op_preds = predictive_model(Input_Layer)\n    final_logits = ArcFace_Layer(op_preds)\n\n    testing_model = tf.keras.models.Model(inputs=Input_Layer,outputs=final_logits)\n    testing_model.load_weights('./Identification_ECG1D_Incremental_OSV_Final.h5')\n    testing_model.compile(optimizer=tf.keras.optimizers.Adam(lr=1e-4,clipnorm=1.0),loss=ArcFace_Loss,metrics=['accuracy'])\n\ntesting_model.summary()      \ntf.keras.utils.plot_model(testing_model)        \n\n#### Model Checkpointing\nfilepath = './Identification_ECG1D_Incremental_OSV_Final.h5'\ncheckpoint = tf.keras.callbacks.ModelCheckpoint(filepath,monitor='val_accuracy',save_best_only=True,mode='max',save_weights_only=True)\n\n#### Model Training\n#testing_model.fit(X_train,y_train_ohot,epochs=500,batch_size=128,\n#          validation_data=(X_dev,y_dev_ohot),validation_batch_size=128,\n#        callbacks=checkpoint)  ","metadata":{"execution":{"iopub.status.busy":"2021-07-29T11:25:48.656475Z","iopub.execute_input":"2021-07-29T11:25:48.656854Z","iopub.status.idle":"2021-07-29T11:25:51.066819Z","shell.execute_reply.started":"2021-07-29T11:25:48.656823Z","shell.execute_reply":"2021-07-29T11:25:51.065466Z"},"trusted":true},"execution_count":21,"outputs":[{"name":"stdout","text":"Model: \"model_9\"\n_________________________________________________________________\nLayer (type)                 Output Shape              Param #   \n=================================================================\ninput_7 (InputLayer)         [(None, 512, 1)]          0         \n_________________________________________________________________\nmodel_8 (Functional)         (None, 256)               14515072  \n_________________________________________________________________\narc_face_6 (ArcFace)         (None, 17)                4352      \n=================================================================\nTotal params: 14,519,424\nTrainable params: 14,519,424\nNon-trainable params: 0\n_________________________________________________________________\n","output_type":"stream"}]},{"cell_type":"markdown","source":"# Model Testing","metadata":{}},{"cell_type":"markdown","source":"## KNN Testing Based ArcFace Loss","metadata":{}},{"cell_type":"code","source":"###### Testing Model - ArcFace Style\nwith tpu_strategy.scope():     \n\n    def normalisation_layer(x):   \n        return(tf.math.l2_normalize(x, axis=1, epsilon=1e-12))\n\n    #X_dev_flipped = tf.image.flip_up_down(X_dev)  \n    #x_train_flipped = tf.image.flip_up_down(X_train_final)\n\n    feature_model = tf.keras.models.Model(inputs=model.input,outputs=testing_model.layers[-2].output)\n    feature_model.compile(tf.keras.optimizers.Adam(lr=1e-4),loss='categorical_crossentropy',metrics=['accuracy'])\n    feature_model.summary()\n\nwith tpu_strategy.scope():\n    #y_in = tf.keras.layers.Input((89,))\n\n    Input_Layer = tf.keras.layers.Input((512,1))\n    op_1 = feature_model(Input_Layer)\n\n    ##Input_Layer_Flipped = tf.keras.layers.Input((224,224,3))\n    ##op_2 = predictive_model([Input_Layer_Flipped,y_in]) \n    ##final_op = tf.keras.layers.Concatenate(axis=1)(op_1)\n\n    final_norm_op = tf.keras.layers.Lambda(normalisation_layer)(op_1)\n\n    final_model = tf.keras.models.Model(inputs=Input_Layer,outputs=final_norm_op)\n    final_model.compile(tf.keras.optimizers.Adam(lr=1e-4),loss='categorical_crossentropy',metrics=['accuracy'])\n    final_model.summary()\n\n##### Nearest Neighbor Classification \nfrom sklearn.neighbors import KNeighborsClassifier\nTest_Embeddings = final_model.predict(X_dev)\nTrain_Embeddings = final_model.predict(X_train)\n\ncol_mean = np.nanmean(Test_Embeddings, axis=0)\ninds = np.where(np.isnan(Test_Embeddings))\n#print(inds)\nTest_Embeddings[inds] = np.take(col_mean, inds[1])\n\ncol_mean = np.nanmean(Train_Embeddings, axis=0)\ninds = np.where(np.isnan(Train_Embeddings))\n#print(inds)\nTrain_Embeddings[inds] = np.take(col_mean, inds[1])\n\n#Test_Embeddings = np.nan_to_num(Test_Embeddings)\n\n##### Refining Test Embeddings\n#for i in range(Train_Embeddings.shape[0]):\n#    for j in range(Train_Embeddings.shape[1]):\n#        if(math.isnan(Train_Embeddings[i,j])):\n#            Train_Embeddings[i,j] == 0\n#        if(Train_Embeddings[i,j]>1e4):\n#            Train_Embeddings[i,j] == 1e4\n\n##### Refining Train Embeddings    \n#for i in range(Test_Embeddings.shape[0]):\n#    for j in range(Test_Embeddings.shape[1]):\n#        if(math.isnan(Test_Embeddings[i,j])):\n#            Test_Embeddings[i,j] == 0\n#        if(Test_Embeddings[i,j]>1e4 or math.isinf(Test_Embeddings[i,j])):\n#            Test_Embeddings[i,j] == 1e4\n\n#del(X_train_final,X_dev,X_dev_flipped,x_train_flipped)\n#gc.collect()\n\nTest_Accuracy_With_Train = []\nTest_Accuracy_With_Test = []\n                                                                     \nfor k in range(1,11):\n    knn = KNeighborsClassifier(n_neighbors=k,metric='euclidean')\n    knn.fit(Train_Embeddings,y_train)\n    Test_Accuracy_With_Train.append(knn.score(Test_Embeddings,y_dev))\n    knn.fit(Test_Embeddings,y_dev)\n    Test_Accuracy_With_Test.append(knn.score(Test_Embeddings,y_dev))\n\nprint('--------------------------------')\nprint(np.max(Test_Accuracy_With_Train))\nprint(np.max(Test_Accuracy_With_Test))\nprint('--------------------------------')\nprint(np.mean(Test_Accuracy_With_Train))\nprint(np.mean(Test_Accuracy_With_Test))\nprint('--------------------------------')\nprint((Test_Accuracy_With_Train)[0])\nprint((Test_Accuracy_With_Test)[0])\nprint('--------------------------------')\n\nplt.plot(np.arange(1,11),np.array(Test_Accuracy_With_Train),label='Test_Accuracy_With_Train')\nplt.plot(np.arange(1,11),np.array(Test_Accuracy_With_Test),label='Test_Accuracy_With_Test')\nplt.title('Testing Accuracy vs Number of Neighbors')\nplt.xlabel('Number of Neighbors')\nplt.ylabel('Test Accuracy')\nplt.legend()       \nplt.show()  \n\nnp.savez_compressed('TesEmb_ECG1D_MITBIH.npz',Test_Embeddings)\nnp.savez_compressed('TrainEmb_ECG1D_MITBIH.npz',Train_Embeddings)","metadata":{"execution":{"iopub.status.busy":"2021-07-29T11:27:18.181730Z","iopub.execute_input":"2021-07-29T11:27:18.182184Z","iopub.status.idle":"2021-07-29T11:27:26.105618Z","shell.execute_reply.started":"2021-07-29T11:27:18.182136Z","shell.execute_reply":"2021-07-29T11:27:26.104572Z"},"trusted":true},"execution_count":24,"outputs":[{"name":"stdout","text":"Model: \"model_14\"\n__________________________________________________________________________________________________\nLayer (type)                    Output Shape         Param #     Connected to                     \n==================================================================================================\ninput_5 (InputLayer)            [(None, 512, 1)]     0                                            \n__________________________________________________________________________________________________\nconv1d_149 (Conv1D)             (None, 512, 32)      512         input_5[0][0]                    \n__________________________________________________________________________________________________\nconv1d_150 (Conv1D)             (None, 512, 64)      30784       conv1d_149[0][0]                 \n__________________________________________________________________________________________________\nconv1d_151 (Conv1D)             (None, 512, 128)     123008      conv1d_150[0][0]                 \n__________________________________________________________________________________________________\nself_cal__conv1d_24 (self_cal_C (None, 512, 128)     262464      conv1d_151[0][0]                 \n__________________________________________________________________________________________________\nself_cal__conv1d_25 (self_cal_C (None, 512, 128)     344384      self_cal__conv1d_24[0][0]        \n__________________________________________________________________________________________________\nconv1d_152 (Conv1D)             (None, 512, 128)     49280       conv1d_151[0][0]                 \n__________________________________________________________________________________________________\nadd_9 (Add)                     (None, 512, 128)     0           self_cal__conv1d_25[0][0]        \n                                                                 conv1d_152[0][0]                 \n__________________________________________________________________________________________________\nmax_pooling1d_6 (MaxPooling1D)  (None, 256, 128)     0           add_9[0][0]                      \n__________________________________________________________________________________________________\nself_cal__conv1d_26 (self_cal_C (None, 256, 256)     385472      max_pooling1d_6[0][0]            \n__________________________________________________________________________________________________\nself_cal__conv1d_27 (self_cal_C (None, 256, 256)     1376896     self_cal__conv1d_26[0][0]        \n__________________________________________________________________________________________________\nconv1d_153 (Conv1D)             (None, 256, 256)     98560       max_pooling1d_6[0][0]            \n__________________________________________________________________________________________________\nadd_10 (Add)                    (None, 256, 256)     0           self_cal__conv1d_27[0][0]        \n                                                                 conv1d_153[0][0]                 \n__________________________________________________________________________________________________\nmax_pooling1d_7 (MaxPooling1D)  (None, 128, 256)     0           add_10[0][0]                     \n__________________________________________________________________________________________________\nself_cal__conv1d_28 (self_cal_C (None, 128, 512)     1540992     max_pooling1d_7[0][0]            \n__________________________________________________________________________________________________\nself_cal__conv1d_29 (self_cal_C (None, 128, 512)     5506304     self_cal__conv1d_28[0][0]        \n__________________________________________________________________________________________________\nconv1d_154 (Conv1D)             (None, 128, 512)     393728      max_pooling1d_7[0][0]            \n__________________________________________________________________________________________________\nadd_11 (Add)                    (None, 128, 512)     0           self_cal__conv1d_29[0][0]        \n                                                                 conv1d_154[0][0]                 \n__________________________________________________________________________________________________\ntransformer_3 (Transformer)     (None, 128, 512)     4205568     add_11[0][0]                     \n__________________________________________________________________________________________________\nglobal_average_pooling1d_3 (Glo (None, 512)          0           transformer_3[0][0]              \n__________________________________________________________________________________________________\ndense_54 (Dense)                (None, 256)          131328      global_average_pooling1d_3[0][0] \n__________________________________________________________________________________________________\ndropout_23 (Dropout)            (None, 256)          0           dense_54[0][0]                   \n__________________________________________________________________________________________________\ndense_55 (Dense)                (None, 256)          65792       dropout_23[0][0]                 \n==================================================================================================\nTotal params: 14,515,072\nTrainable params: 14,515,072\nNon-trainable params: 0\n__________________________________________________________________________________________________\nModel: \"model_15\"\n_________________________________________________________________\nLayer (type)                 Output Shape              Param #   \n=================================================================\ninput_10 (InputLayer)        [(None, 512, 1)]          0         \n_________________________________________________________________\nmodel_14 (Functional)        (None, 256)               14515072  \n_________________________________________________________________\nlambda_2 (Lambda)            (None, 256)               0         \n=================================================================\nTotal params: 14,515,072\nTrainable params: 14,515,072\nNon-trainable params: 0\n_________________________________________________________________\n--------------------------------\n0.963963963963964\n1.0\n--------------------------------\n0.9346846846846846\n0.9725225225225225\n--------------------------------\n0.963963963963964\n1.0\n--------------------------------\n","output_type":"stream"},{"output_type":"display_data","data":{"text/plain":"<Figure size 432x288 with 1 Axes>","image/png":"iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8rg+JYAAAACXBIWXMAAAsTAAALEwEAmpwYAABEaUlEQVR4nO3dd3hU1dbA4d9Kp5eE3kLvECCgNAEVwYKC6CeIvTfEgr2jXMu1Il656BU7YscOiHQQCU16j/QWBEJPWd8f5yQMMT0ZziRZ7/PMkzl9zcnMrNl7n7O3qCrGGGNMRkFeB2CMMSYwWYIwxhiTKUsQxhhjMmUJwhhjTKYsQRhjjMmUJQhjjDGZsgRRDInIIRFp4HUc5vQSkZ4istXD4w8QkS3u+6+dH/b/s4hcm8t1p4vITVksixYRFZGQwo2w+LEEcZq5H560R6qIHPWZHpKP/f3jg6CqZVV1Y+FF/Y9jXud+wK7w1zGKAxF52j1P/+czL8SdF+1haP7yMnCX+/5bnHGh+7qXiUiQz7znROT93OxcVc9X1Q8KL1yTE0sQp5n74SmrqmWBzUA/n3mfeB1fLl0L7AOuOZ0HLaK/+PYBz4hIsNeB5EU+z3U9YEUO69QEBuVj3wGpiL4nc80SRIAQkSAReVhENohIgoh8LiKV3WURIvKxO3+/iCwQkWoiMhLoDox2SyCj3fVVRBq5z98XkbdE5EcRSRSR+SLS0Oe454nIGhE5ICL/EZEZWRXN3fXrAT2AW4A+IlLdZ1mwiDzqvoZEEVkoInXcZS1FZIqI7BORXSLyqE98z/ns45RqEhGJF5GHRORP4LD7C/xhn2OsFJEBGWK8WURW+SxvLyIPiMhXGdYbJSJvZPIaHxKRLzPMe0NERrnPrxORje7+N+VQ8vsFOAFclcX5PKUE6O57ts+0isgdIrLOPd6zItJQROaKyEH3fRKWYZ+Pishe99wN8ZkfLiIvi8hm938wRkRK+Z5397XvBMZlEmuQiDwuIn+JyG4R+VBEKrj7PQQEA0tFZEM25+MlnISZ6ReriJzpvrb9IrJURHpmdq7c99or7uvcJCJ3yT+rjeqJyBz3vE0WkagMh7tBRLaLyA4RGZ7hPL3uLtvuPg/P6jyJSJSI/ODGvE9EZolPKalIU1V7ePQA4oFz3efDgN+B2kA48F9gvLvsVuB7oDTOh7ADUN5dNh24KcN+FWjkPn8fSAA6ASHAJ8Bn7rIo4CBwqbtsGJCUcX8Z9v0E8If7fBlwv8+yB9x5TQEB2gKRQDlgB3A/EOFOn+ET33M+++gJbM1wjpYAdYBS7rzLcX6JBgFXAIeBGj7LtgEd3Rga4fyyreGuV9FdLwTYDXTI5DXWA44A5dzpYDf+M4Ey7jlr6i6rAbTM4lw9DXwMXAxsBELd4yoQndn/D7gOmJ3hfzkRKA+0BI4DU4EGQAVgJXCtz7lLBl7FeQ/1cF9zWqyvAd8Bld3/wffA8xm2fdHdtlQmr+cGYL177LLA18BHmb3vsjgfCjQGFqa9ZuA54H33eS2c9+oF7v+2tztdJeO5Am5zX3ttoBLwq7v/EJ91NwBNgFLu9Avusmh33fHu/7M1sIeTn8UROJ/FqkAVYC7wbFbnCXgeGOP+f0NxfrSJ198vhfId5XUAJfnBqQliFXCOz7IaOF/WIe4Hcy7QJpN9pH9ofOZlTBDv+iy7AFjtPr8GmOezTIAtGfeXYd/rgHvc548AS32WrQEuyWSbwcDiLPb3PjkniBtyOI9L0o4LTAKGZbHez8DN7vOLgJXZ7HM2cI37vDewwX1eBtgPDCSTL9EM+3ga+Nh9Ph+4nfwliK4+0wuBh3ymXwFe9zl3yUAZn+Wf4yR1wUkWDX2WdQY2+Wx7AojI5vVMBe7wmW6a9h7N+L7LYnvFSdgXAH8BYZyaIB7CJ+H4/D+vzXiugN+AW33WO5d/JojHfZbfAfziPo92123ms/wl4H/u8w3ABT7L+gDxWZ0nnIQyMbvXXlQfxaMYVDzUA75xi6n7cRJGClAN+Ajng/KZW+R9SURC87DvnT7Pj+D8+gPnV/iWtAXqvNuzvApGRLoC9YHP3FmfAq1FJMadroPz4cooq/m5tcV3QkSuEZElPueqFU5pKKdjfcDJqp6rcM5rVj7FSWwAV7rTqOphnFLLbcAOcarumuXiNTwOPIZTgsqrXT7Pj2YyXdZn+m83xjR/4fyfq+CUQBf6nLdf3Plp9qjqsWziqOnuz3ffITjv0VxT1Z9w3me3ZlhUD7g8LT43xm44P5Yyi8X3fbElk3Wyet9ntk3aeUrbd8bXWdNnOuN5+jdOyWqyW/X4cCaxFEmWIALHFuB8Va3o84hQ1W2qmqSqz6hqC6ALzq/ftAbignTHuwOniA6AiIjvdCauxfklusStf53vMz/tNTTMZLstONUSmTmM88WVpnom66S/RnHaQN4B7gIiVbUisNyNK7sYAL4F2ohIK5xzmN1FAV8APUWkNjAAN0EAqOokVe2N88W12o0nW6o6BedL5I4Mi3Lz+vOikoiU8ZmuC2wH9uIkk5Y+768K6lwskR5mDvvejvMl7rvvZE5NWLn1GPAop772LTglCN/PQBlVfSGT7U957+L8MMgr323SzhNk/jq3+0yfcp5UNVFV71fVBjjVifeJyDn5iCfgWIIIHGOAke4XICJSRUQucZ/3EpHW4lwJcxCnWJ/qbreLrL98c/IjTgmgv9u4dydZfEGJSATwfziN0zE+j6HAle727wLPikhjcbQRkUjgB6CGiNzjNgCWE5Ez3F0vAS4QkcriNHjfk0PMZXA+oHvcuK7HKUGkeRcYLiId3BgapZ1T91fflzhf9n+o6uasDqKqe3CqKcbhVMOsco9XTUQucb+EjwOHOPm/yMljwIMZ5i0BLhWR0uJcWHBjLveVnWdEJExEuuMkwi9UNRUnkb0mIlXd11JLRPrkYb/jgXtFpL6IlAX+BUxQ1eS8Bqiq03ES+7U+sz8G+olIH7cROsJtFM7sR8vnwDD3NVTEqZ7Kqyfc894SuB6Y4M4fDzzufgajgCfd2DIlIhe57zMBDuCU/HP7ngholiACxxs4DYiTRSQRp5Es7Uu0Os4X20GcqqcZnKweeQO4TET+Fvcqm9xS1b04jbov4TQGtgDicL74MuqP8wv0Q1XdmfYA3sOpZuiL0zj6OTDZjfV/OPX0iTj1+P1wiv3rgF7ufj8CluK0NUzm5Ic0q5hX4tS7z8NJjq2BOT7LvwBG4iSBRJxSQ2WfXXzgbpNd9VKaT3Hqtj/1mRcE3Ifzi3IfTkPw7bnYF6o6B/gjw+zXcOq0d7mxFfRS553A3258nwC3qepqd9lDOKWY30XkIE7DbtM87Ps9nPM2E9gEHMP5gZBfj+Pzv1HVLcAlOCWLPTgligfI/HvqHZz3y5/AYuAnnNJMSh6OPwPnfEwFXlbVye7853A+B3/iXHSxyJ2XlcY45/IQzvvyP6o6LQ9xBCxxG1mMwb00byswpLi8wTMSkbo41ULVVfWg1/GYwiEi5wNjVLVejiubXLMSRAnnFucrutd5P4pTl/+7x2H5hZsA78O5zNeSQxEmIqVE5AJx7oupBTwFfON1XMVNsb4L0ORKZ5wqlDCc68r7q+pRb0MqfG6bwS6cK1L6ehyOKTgBnsGpkjyK0572pKcRFUNWxWSMMSZTVsVkjDEmU8WmiikqKkqjo6O9DsMYY4qUhQsX7lXVKpktKzYJIjo6mri4OK/DMMaYIkVE/spqmVUxGWOMyZQlCGOMMZmyBGGMMSZTxaYNwphAlZSUxNatWzl2LLuOUo3xr4iICGrXrk1oaO47grYEYYyfbd26lXLlyhEdHY3Tn5sxp5eqkpCQwNatW6lfv36ut/NbFZOIvCfOsITLs1gu4gz5uF5E/hSR9j7LrhVniMV1InJtZtsbU1QcO3aMyMhISw7GMyJCZGRknkux/myDeJ/suzQ4H6cXxMY4XUi/DSDOOMxP4fRk2gl4SkQq+TFOY/zOkoPxWn7eg35LEKo6E6c75KxcgtN1tKrq70BFEamBM7zfFFXdp6p/A1PwZ985qakw+XH4O95vhzDGmKLIy6uYanHqkH9b3XlZzf8HEblFROJEJG7Pnj35i2LfRlj0IYztCRt+y98+jDGmGCrSl7mq6lhVjVXV2CpVMr1TPGdRjeDmaVCuBnw8EGa/DtaBoSlGEhISiImJISYmhurVq1OrVq306RMnTuS4/fTp05k7d26ujhUTE8OgQYMKGrJnJk6cSP/+/dOnn3/+eRo1apQ+/f3333PxxRezfft2LrvsMgCWLFnCTz/9lL7O008/zcsvv5zjsQr6f4mLi+Puu+/Ow6vLOy+vYtrGqWPC1nbnbQN6Zpg/3a+RRDaEG6fAxDvh16dgxxK45C0IK5PjpsYEusjISJYsWQI4X15ly5Zl+PDhud5++vTplC1bli5dumS73qpVq0hJSWHWrFkcPnyYMmX88/lJTk4mJMQ/X11dunTh1ltvTZ+eN28e5cuXZ/fu3VStWpW5c+fSpUsXatasyZdffgk4CSIuLo4LLrggT8fKzf8lu9caGxtLbGxsno6ZV14miO+Au0TkM5wG6QOqukNEJgH/8mmYPg94xO/RhJeFy9+HOW/A1Gdgz1oY9DFUzu9wz8b80zPfr2Dl9sIdq6hFzfI81a9lnrZZuHAh9913H4cOHSIqKor333+fGjVqMGrUKMaMGUNISAgtWrTghRdeYMyYMQQHB/Pxxx/z5ptv0r1790z3OX78eK6++mpWrVrFxIkTufLKKwFYsGABw4YN4/Dhw4SHhzN16lRKly7NQw89xC+//EJQUBA333wzQ4cOTe9TLSoqiri4OIYPH8706dN5+umn2bBhAxs3bqRu3bo8//zzXH311Rw+fBiA0aNHpyewF198kY8//pigoCDOP/98br75Zi6//HIWLVoEwLp167jiiivSp31VqVKF8uXLs379eho1asS2bdsYOHAgc+fOpX///sydO5fnnnuO+Ph4LrroIhYtWsSTTz7J0aNHmT17No884nxVrVy5kp49e7J582buueeePP3Sv+6664iIiGDx4sV07dqVQYMGMWzYMI4dO0apUqUYN24cTZs2Zfr06bz88sv88MMPPP3002zevJmNGzfm65hZ8VuCEJHxOCWBKBHZinNlUiiAqo7BGUP2ApwxYY/gDBqOqu4TkWeBBe6uRqhqdo3dhRk0dLsHqreGL29w2iUGvgeNzz0thzfmdFBVhg4dysSJE6lSpQoTJkzgscce47333uOFF15g06ZNhIeHs3//fipWrMhtt92Wq1LHhAkTmDJlCqtXr+bNN9/kyiuv5MSJE1xxxRVMmDCBjh07cvDgQUqVKsXYsWOJj49nyZIlhISEsG9fzh/xlStXMnv2bEqVKsWRI0eYMmUKERERrFu3jsGDBxMXF8fPP//MxIkTmT9/PqVLl2bfvn1UrlyZChUqsGTJEmJiYhg3bhzXX399lsfp2rUrc+fOJSUlhcaNG3PmmWcyadIkLrroIpYuXUrHjh3ZuXMnAGFhYYwYMYK4uDhGjx4NOKWB1atXM23aNBITE2natCm33357nm5Q27p1K3PnziU4OJiDBw8ya9YsQkJC+PXXX3n00Uf56quv/rFNQY+ZGb8lCFUdnMNyBe7MYtl7OAOke6PROXDLdJhwFXxyGZzzBHS7z0kgxhRAXn/p+8Px48dZvnw5vXv3BiAlJYUaNWoA0KZNG4YMGUL//v1PqYvPSdqv/rp161KrVi1uuOEG9u3bx7Zt26hRowYdO3YEoHz58gD8+uuv3HbbbenVJ5UrV87xGBdffDGlSpUCnLvT77rrLpYsWUJwcDBr165N3+/1119P6dKlT9nvTTfdxLhx43j11VeZMGECf/zxR5bH6dKlS3qC6Ny5M506dWLEiBEsXryYZs2aERERkWOsF154IeHh4YSHh1O1alV27dpF7dq1c9wuzeWXX05wcDAABw4c4Nprr2XdunWICElJSX45ZmaKdCO1X1WuDzdOhlYDYeoI+PwaOJ7odVTGFJiq0rJlS5YsWcKSJUtYtmwZkydPBuDHH3/kzjvvZNGiRXTs2JHk5ORc7XP8+PGsXr2a6OhoGjZsyMGDBzP9lZuTkJAQUlNTAf5xU5dvm8Zrr71GtWrVWLp0KXFxcTk26g4cOJCff/6ZH374gQ4dOhAZGZnlumkliLlz59K5c2fKlSvHsWPHmD59eo7tMGnCw8PTnwcHB+f6PKbxfa1PPPEEvXr1Yvny5Xz//fdZ3uxW0GNmxhJEdsLKwMB34byRsPoHePdcSNjgdVTGFEh4eDh79uxh3rx5gPNrfMWKFaSmprJlyxZ69erFiy++yIEDBzh06BDlypUjMTHrH0epqal8/vnnLFu2jPj4eOLj45k4cSLjx4+nadOm7NixgwULnBrjxMREkpOT6d27N//973/Tv8TSqpiio6NZuHAhQLYJ5sCBA9SoUYOgoCA++ugjUlJSAOjduzfjxo3jyJEjp+w3IiKCPn36cPvtt2dbvQTQvHlztm/fzuzZs2nXrh3gXJ01ZswYunbt+o/1czo/BXXgwAFq1XKu9H///ff9dpzMWILIiQh0uQuu/gYO7YaxvWDtJK+jMibfgoKC+PLLL3nooYdo27YtMTEx6VUqV111Fa1bt6Zdu3bcfffdVKxYkX79+vHNN98QExPDrFmz/rG/WbNmUatWLWrWrJk+76yzzmLlypUkJCQwYcIEhg4dStu2benduzfHjh3jpptuom7durRp04a2bdvy6aefAvDUU08xbNgwYmNj06tYMnPHHXfwwQcf0LZtW1avXp3+i7tv375cfPHFxMbGEhMTc8rlpkOGDCEoKIjzzjsv2/MjIpxxxhlERkam1+F37tyZjRs3ZlqC6NWrFytXriQmJoYJEyZku+/8ePDBB3nkkUdo165doZQK8kK0mFzzHxsbq34fUW7/ZvhsCOxcBr0ehe7DIchyrMneqlWraN68uddhlHgvv/wyBw4c4Nlnn/U6FM9k9l4UkYWqmun1staba15UrOu0S3w/DKaNhO1LYMAYiCjvdWTGmGwMGDCADRs28Ntv1ltCXliCyKvQUjDgv1CzPUx6FN49B674BKo08ToyY/xu5MiRfPHFF6fMu/zyy3nsscc8iih3vvnmm3/MGzBgAJs2bTpl3osvvkifPn0K/fgJCQmcc845/5g/derUbBvMvWZVTAWxaRZ8cR0kH4dLx0KzvN1JaUoGq2IygSKvVUxWgV4Q9bvDrTOc/pw+GwzTnnd6hzXGmGLAEkRBVagN1/8CMUNgxgtOojh2wOuojDGmwCxBFIbQCKdzvwtehvW/OpfC7l7tdVTGGFMgliAKiwh0uhmu/R6OH3Qar1d+53VUxhiTb5YgClu9LnDLDKjSFD6/GqY+C6kpXkdlSjAbDyL3itJ4EJC3/01+2GWu/lChFlz/M/w0HGa9DDuWwsB3oJQNrW1OPxsPIvcCbTyInOT2f5NfVoLwl5BwuPhNuOh12DjdaZfYtdLrqIzXfn4Yxl1YuI+fH85zGAsXLqRHjx506NCBPn36sGPHDgBGjRpFixYtaNOmDYMGDSI+Pp4xY8bw2muvZdnVRpq08SDOO+88Jk6cmD5/wYIFdOnShbZt29KpUycSExNJSUlh+PDhtGrVijZt2vDmm28CTl9Me/fuBZweYnv27Ak4X6BXX301Xbt25eqrryY+Pp7u3bvTvn172rdvf8qv6BdffJHWrVvTtm1bHn74YTZs2ED79u3Tl69bt+6UaV++40EAp4wHATB37ly6du1KfHw8rVq14sSJEzz55JNMmDDhlK420saDaNCgAaNGjfL8f5NfVoLwt9jroWoLp7rp3XOh/1vQcoDXUZkSzMaDCNzxIJKSkvzyv8kvSxCnQ90znHaJz69xbqzbvgTOeRKCsu6MzBRT57/gdQQ2HkQAjwexZs2aQv/fFIQliNOlfA247kf45SGY8zrs/BMG/g9K5/zBMKYwpY0Hkdbdt68ff/yRmTNn8v333zNy5EiWLVuWq336jgcBpI8HceaZZ+YptvyMB5Gamprjl/bAgQN55plnOPvss3M1HsSbb75JSkoKN99882kdD8If/5uCsDaI0ykkDC56DfqNgvjZzpCmO/3/TzbGl40HEbjjQTRt2rRQ/zcFZQnCCx2uda5ySkmCd3vDsi+9jsiUIDYeROCOBxEWFlao/5uCss76vJS4C764FjbPgy5D4ZynIdhq/Yob66wvMNh4EDYeRNFSrhpc8x1MfgzmvuncL9HvDajcwOvIjClWbDyI/LEE4bWQMLjg31AjBn68H0Z3hA7XwVkPOgnEmABi40Hkj40H4bEiWcWUUeJOmPESLPoAgsPgzDug690QUcHryEwBrFq1imbNmiEiXodiSjBVZfXq1TYeRJFVrjpc9Crc+Qc0vcDppuONtjBnFCQd9To6k08REREkJCRQXH6MmaJHVUlISMjVPRy+rAQRyHb8CVNHwPopUK4m9HzYGXfCGrKLlKSkJLZu3fqP6/qNOZ0iIiKoXbv2P+7ozq4EYQmiKIifDb8+A1v/gMhGcPYT0OISp4txY4wpAKtiKuqiu8GNk2HQeAgKdS6NHdsTNkzzOjJjTDFmCaKoEIFmF8Dtc6D/GDiyDz7qDx/0g60LvY7OGFMMWYIoaoKCIWYwDI2Dvi86XYi/ezZMuAr2rPU6OmNMMWIJoqgKCYczb4NhS6Dno05103/OgIl3wYGtXkdnjCkGLEEUdeHloOdDMGwpnHE7/DkBRrWHSY851VDGGJNPfk0QItJXRNaIyHoR+cewVyJST0SmisifIjJdRGr7LHtJRFaIyCoRGSV2l1H2ykRB33/B0IXQ+nL4/T/OPRQz/g3HD3kdnTGmCPJbghCRYOAt4HygBTBYRFpkWO1l4ENVbQOMAJ53t+0CdAXaAK2AjkAPf8VarFSs64xad/s8qH8WTHsORsXA/LGQnLuB0I0xBvxbgugErFfVjap6AvgMuCTDOi2AtN6zpvksVyACCAPCgVBglx9jLX6qNoNBn8CNv0JUU/j5ARjdAZZOgNQUr6MzxhQB/kwQtYAtPtNb3Xm+lgKXus8HAOVEJFJV5+EkjB3uY5Kqrsp4ABG5RUTiRCRuz549hf4CioU6HeG6H+CqryCiInxzC4zpDmt+gWJyk6Qxxj+8bqQeDvQQkcU4VUjbgBQRaQQ0B2rjJJWzRaR7xo1VdayqxqpqbJUqVU5n3EWLCDQ61xkX+7L3IPkojL8C3usLf/1zaENjjAH/JohtQB2f6druvHSqul1VL1XVdsBj7rz9OKWJ31X1kKoeAn4GOvsx1pIhKAhaDXQ6A7zoNfg7Hsb1hU/+D3Yu9zo6Y0yA8WeCWAA0FpH6IhIGDAK+811BRKJEJC2GR4D33OebcUoWISISilO6+EcVk8mn4FCIvQHuXgznPg1bfocx3eCrm2Hfphw3N8aUDH7rFlRVk0XkLmASEAy8p6orRGQEEKeq3wE9gedFRIGZwJ3u5l8CZwPLcBqsf1HV7/0Va4kVVhq63esMUDTnDfh9DKz4Glr0h1IVPQ4OKB3lXIlVu6MzsJIx5rSy3lzNSQd3wIwXYfWPoAFwpdPRv0FTIbQ01OsKDXo6j6otnOoyY0yBWXffpmg6uh/+mgMbpzuPvW5fU2WqQP0eJxNGxTpZ7sIYk73sEoSNPGMCV6mK0OxC5wFwYBtsmnEyYSz/0plfueHJZFG/O5Sq5Em4xhQ3VoIwRZMq7Fl9MlnEz4YTh0CCoEbMyYRR5wwIzdswi8aUJFbFZIq/lCTYtvBkwti6AFKTISQC6nY+mTCqt7H2C2N8WIIwJc/xRPhr7smEsXulM79UZefKqAY9oWEvqBTtXYzGBABrgzAlT3g5aNLHeQAk7oRNM51ksWEarPzWmV8p+mTpIvosKBPpSbjGBCIrQZiSRxUS1p9MFvGz4PhBQKBGm5MJo25nCC3lbazG+JlVMRmTnZRk2L74ZHXUlvmQmgTB4VCn08nqqBoxzpCvxhQjliCMyYsTh2HzPLeEMR12LXPmR1Q42X7RoBdUbuB0hGhMEWZtEMbkRVgZp/fbRuc604f2nHr/xSq315cKdaBBDydZ1D8Lylb1KmJj/MJKEMbkhSrs23gyWWyaCcf2O8uqtTq1/SK8rGdhGpNbVsVkjL+kpsCOpScTxubfIeU4BIWebL9o0BNqtodgK7CbwGMJwpjTJemokyTSEsaOpYBCWDmnG5C0hBHVxNovTECwNghjTpfQUs4VTw17OdNH9p28/2LjdFjzkzO/XA2f/qN6QPka3sRrTDYsQRjjT6UrQ8v+zgOcUfw2ug3e6ybD0vHO/CrNTiaMel0horwX0RpzCqtiMsYrqamwa7lbupjmjA+efBQkGGrHnkwYtWJtwCTjN9YGYUxRkHQMtv5xsjpq+2J3wKQyEN0V2l8DzS6ytgtTqCxBGFMUHf3b6cY8rTpq/2ao1cEZR7z+WV5HZ4oJa6Q2pigqVQma93MeKclOe8X05+GDftDwbDjnSajZzusoTTFmHeMbUxQEh0D7q2HoIjhvpFP9NLYnfHEd7F3vdXSmmMoxQYiI9U5mTKAIjYAud8GwpXDWg7B2MrzVCb4fBge3ex2dKWZyU4JYJyL/FpEWfo/GGJM7ERXg7Mdg2BLoeBMs/gRGtYMpTzr3XhhTCHKTINoCa4F3ReR3EblFROwibWMCQdmqcMFLMDQOWvSHOaPgjRiY9YrTK60xBZCnq5hEpAfwKVAR+BJ4VlUDogLUrmIyBti1AqY+C2t/hrLVoMeD0P5aCA71OjIToLK7iilXbRAicrGIfAO8DrwCNAC+B34qzECNMQVUrSVc+RncMMkZr+LH+2F0R1j2pXNjnjF5kKs2COAS4N+q2k5VX1XVXar6JfCLf8Pzv+PJKdz5ySKWbtnvdSjGFJ66Z8L1P8OVXzjjW3x1I/z3LFg3xemy3BR9yced+2R+ew5mvOSXQ+TmPog2qnooswWqenchx3Pa7TpwnCVb9nP5f+cxsn8rLo+t43VIxhQOEWhynjPw0fKvYNpz8MllTl9P5zwFdc/wOkKTF6mpsHvFyTvt/5oLSUecrlma9PXLIXNsgxCRD4Bhqrrfna4EvKKqN/glonwqSBvEvsMnGDp+EXPWJ3BN53o8fmELwkLsFhFTzCSfgEUfOL82D++GphfA2U9ANbtAMWDt33wyIWycAUf2OvOjmp7sqyu6q3NVWz4VqKsNEVmsqu1ymue1gjZSJ6ek8tKkNYyduZGO0ZV4a0h7qpaLKMQIjQkQxw/B/LedK56OJ0LbQdDzEahUz+vIzJF9ED/rZFLYt9GZX7b6yYTQoAeUr1lohyxoglgK9FTVv93pysAMVW1daBEWgsK6ium7pdt58MulVCgVyttXdaB93UqFEJ0xAejIPpj9KswfCyjE3gjd74eyVbyOrORIOgZbfAaY2r6E9AGmorudTApVmvqtk8aCJohrgEeBLwABLgNGqupHhR1oQRTmZa4rtx/k1o/j2HXgOCMuacmgTnULZb/GBKQD22DGC7D4YwgtDZ3vhM532ZgU/pCaAjv/PHWI2uRjEBQCtX2GqK3V/rRdmlzg3lxFpCXgDpHFb6q6MpcH7gu8AQQD76rqCxmW1wPeA6oA+4CrVHWru6wu8C5QB1DgAlWNz+pYhX0fxP4jJxg6fjGz1u3lyjPq8lS/FoSHWK8jphjbs9ZpyF45EUpVhrOGO6WKUKtqzTdV+HvTyYSwaabTSy9A1RZuQugF9TpDeDlPQiyU7r5FpCqQ/k5R1c05rB+Mcwd2b2ArsAAY7JtcROQL4AdV/UBEzgauV9Wr3WXTcUoqU0SkLJCqqkeyOp4/bpRLSVVenryGt6dvoH3dirx9VQeqlbcPiynmti2EqSOcL7TytaHXI9BmkNNhoMnZ4b2wacbJpLDf/aosX+vUYWbLVfMuRh8FrWK6GOfmuJrAbqAesEpVW+awXWfgaVXt404/AqCqz/usswLoq6pbRESAA6pa3u33aayqdsvti/TnndQ//rmDB75cSpnwEMZc1Z4O9Sr75TjGBJQN02DqM07PsVFNne7Fm11oAxZldOIIbJ57MiHsXObMD68A9bufTAqRjQLy3BV0PIhngTOBX1W1nYj0Aq7KxXa1gC0+01uBjBdeLwUuxamGGgCUE5FIoAmwX0S+BuoDvwIPq2qK78YicgtwC0Dduv5rJ7iwTQ0aVS3LLR/FMWjs7zzVryVDzqiLBOA/uyBUlckrd7Fi2wHuPqcxIcF2qW+J1rCX88W26jun+44JQ6BGDEQ18TqywJG4A7bMh5QTEBTq3KB49uNOtVGNmCJf6spN9EmqmiAiQSISpKrTROT1Qjr+cGC0iFwHzAS2ASluXN2BdsBmYAJwHfA/341VdSwwFpwSRCHFlKmm1cvx3Z3dGDZhMY9/u5xlWw/wzCUtiQgtHu0S8zYk8OIvq1ni3lFeoXQYN3ar721Qxnsi0OISaHohLP3UueJp6wKvowocERXgjFudRFq3s3PXejGSmwSx320DmAl8IiK7gdx0E7kNp4E5TW13XjpV3Y5TgsA9xkBV3S8iW4ElqrrRXfYtTinmlARxulUoHcr/ru3Ia1PWMnraelbvSmTMVe2pUaGUl2EVyPJtB3hp0hpmrt1DjQoRvDSwDT8u28Erk9dwfqvq1KxYdF+bKUTBIc6Y2O2v8ToScxrlpg7hEuAIcC9O30sbgH652G4B0FhE6otIGDAI+M53BRGJEpG0GB7BuaIpbduKIpJ2QfbZQK6unPK34CBheJ+mjLmqPet3JdLvzdn8sano9b+/ae9h7vp0ERe9OZtlW/fz+IXNmTa8J//XsQ7P9W9FqipPTlxBcRmz3BiTd9kmCPdKpB9UNVVVk1X1A1UdpaoJOe1YVZOBu4BJwCrgc1VdISIj3IZvgJ7AGhFZC1QDRrrbpuBUP00VkWU491+8k7+X6B99W9Xg2zu7Uj4ilCvf+Z0P58UXiS/TXQeP8eg3yzj31Rn8tno3d5/diBkP9uKm7g3Sq8vqVC7Nvec24ddVu5i0YpfHERtjvJKbq5imApeq6oHTE1L+eDUexMFjSdz72RKmrt7NZR1q81z/VgHZLnHgSBJvz9jA+3M3kZKqDDmjHnf2akSVcuGZrp+UksrFo+fw9+ETTLnvLMpF2HgCxhRHBb2K6RCwTESm4NP2UBx6ci0M5SNCeeeaWN6Yuo43pq5j7a5E3r6qA7UCpO7+6IkUxs3dxJjpG0g8nsyAmFrc27sJdSqXzna70OAgnr+0NQP+M4dXJq/l6YuzvarZGFMM5SZBfO0+TBaCgoR7ezehVa0K3DthCf3enM1bV7anc8NIz2JKSkllwoItjJq6jt2Jxzm3eVWG92lKs+q57z4hpk5FrjmzHh/Mi6d/u1rE1Knov4CNMQEnT0OOBrJAGXJ0/e5D3PpRHPEJR3jsguZc3zX6tN4vkZqq/LBsB69OXkN8whE6Rlfiob7NiI3O3819iceSOPfVGVQuE853d3Ul1O6NMKZYKeiQo5tEZGPGR+GHWTw0qlqWb+/syjnNqjLih5Xc9/lSjp5IyXnDAlJVpq/ZzUVvzubu8YuJCA1m3HUd+fzWzvlODgDlIkJ55uKWrNpxkHFzNhVixMaYQJebKibfzBIBXA5YXxPZKBcRypirOvDWtPW8+uta1u5KZMxVHXKs98+vhX/9zUu/rGb+pn3UrVyaNwbF0K9NTYKCCqfk0qdldc5tXo3Xpqzj/FY1/PY6jDGBJV9VTG6RpIMf4sm3QKliyui31bsY9tkSQoKE0Ve2p2ujqELb99pdifx70hqmrNxFVNlw7j6nEYM61vXLaHjb9h+l96szOKN+Zd67rmOx62bEmJKqoFVM7X0esSJyG7kreRjg7GbV+O6ubkSVDefq/83nnZkbC3y/xNa/jzD8i6X0fX0mv29IYPh5TZjxQE+u6Rztt6FSa1Usxf3nNWXamj38uGyHX45hjAksubkPYprPZDKwCWdM6jX+DCyvArUEkebQ8WQe+GIpPy/fSb+2NXlxYGtKh+UtzyYcOs5b0zbw8e9/gcB1XaK5vUdDKpUJ81PUp0pOSaX/f+aw6+Bxfr2vBxVK2b0RxhR1hTIeRKAL9AQBTkPyf6Zv4OXJa2harRxjr46lbmTO9fmHjifz7qyNvDNzI0eTUvi/2DoMO7exJ31ALd92gItHz2Zwp7qMHBBQo84aY/KhoFVM/xKRij7TlUTkuUKMr8QQEe7s1Yhx13Vk+/6j9Bs9m5lr92S5/vHkFP43exNnvTSN139dR4+mVZh8bw9eGNjGsw4CW9WqwPVd6/PJ/M0s/Kvo9UFljMm93FQxLVbVdhnmLVLV9n6NLI+KQgnC118Jh7n1o4Ws3ZXIA32acVuPBukNvympyjeLt/HalLVs23+Ubo2ieKBPU9oGyI1qh48n0/vVGZSLCOWHu7vZvRHGFGEFKkEAwSKS3mGPiJQCMu/Ax+RavcgyfH1HF85vXYMXf1nNXZ8u5vDxZCav2Enf12cy/IulRJYN4+Mbz+Djm84ImOQAUCY8hBGXtGLNrkTGzrRbYowprnLTSvoJTq+q49zp64EP/BdSyVE6LITRg9vRplYFXvxlNTPW7uHQ8WQaVCnD20Pa07dV9YC9nPTcFtXo27I6o6au46I2NagXWbwGSjHG5LKRWkT6Aue6k1NUdZJfo8qHolbFlNGsdXt4c+p6Lm1fi8s61C4Sw33uPHCMc1+dQbu6Ffnwhk4Bm8yMMVkrUG+uIlIfmK6qv7jTpUQkWlXjCzfMkq174yp0b1wl5xUDSPUKETzYtylPTlzBxCXb6d+ultchGWMKUW5+pn4BpPpMp7jzjGHIGfWIqVORZ39Yyf4jJ7wOxxhTiHKTIEJUNf2T7z4/PXdmmYAXHCT8a0Br9h9N4vmfVnsdjjGmEOUmQezxGSIUEbkE2Ou/kExR06JmeW7qVp8JcVuYvzHH0WiNMUVEbhLEbcCjIrJZRLYADwG3+DcsU9QMO7cxtSuV4tFvlnE82f/dmxtj/C/HBKGqG1T1TKAF0FxVu2DdfZsMSoeF8Gz/VmzYc5gx0+3eCGOKg7xcS1kXeEhE1gFv+ykeU4T1alqVi9rU4K1p69m455DX4RhjCijbBCEi0SLyiIj8CXwE3A70zuqaWWOe7NeC8NAgHvtmeYG7NTfGeCvLBCEi84Afce6VGOgOEJRo9z+Y7FQtF8HD5zdj3sYEvlq0zetwjDEFkF0JYhdQDqgGpN3BZT8JTY4Gd6xLh3qVGPnjSvYdtnsjjCmqskwQqtofaA0sBJ4WkU1AJRHpdJpiM0VUkHtvROKxZJ77caXX4Rhj8inbNghVPaCq41T1POAM4AngNfdyV2Oy1LR6OW7t0YCvF21j7nq7bcaYoijXVzGp6m5VHa2qXYFufozJFBNDz25MvcjSPPbtco4l2b0RxhQ1+eoyVFX/KuxATPETERrMyP6t2bT3MP+Ztt7rcIwxeRT4fUqbIq1b4ygGtKvF2zM2sG5XotfhGGPyIDdjUnfNzTxjsvLYhc0pEx7Co98sIzXVLoQzpqjITQnizVzOMyZTUWXDefT85iyI/5vP4+z6BmOKiuxulOssIvcDVUTkPp/H00BwbnYuIn1FZI2IrBeRhzNZXk9EporInyIyXURqZ1heXkS2isjoPL4uE2Auj61Np/qV+ddPq9iTeNzrcIwxuZBdCSIMKItzJ3U5n8dB4LKcdiwiwcBbwPk4Hf0NFpEWGVZ7GfhQVdsAI4DnMyx/FpiZ88swgU7EuTfiWFKq3RthTBGR5ZCjqjoDmCEi76ddtSQiQUBZVT2Yi313Atar6kZ328+ASwDfb4cWwH3u82nAt2kLRKQDzl3cvwDW91Mx0KhqWW7v2ZA3pq7j0va16dGkaA2xakxJk5s2iOfdqp4ywHJgpYg8kIvtagG+Fc5b3Xm+lgKXus8HAOVEJNJNRK8Aw7M7gIjcIiJxIhK3Z8+eXIRkvHZ7z4Y0iCrD498u4+gJuzfCmECWmwTRwi0x9Ad+BuoDVxfS8YcDPURkMdAD2IYz5vUdwE+qujW7jVV1rKrGqmpslSr2a7QoiAgNZuSA1mzZd5RRv63zOhxjTDayrGLyESoioTgJYrSqJolIbq5V3AbU8Zmu7c5Lp6rbcUsQIlIWp9fY/SLSGeguInfgtIOEicghVf1HQ7cpejo3jOTyDrV5Z+ZGLompSbPq5b0OyRiTidyUIP4LxANlgJkiUg+noTonC4DGIlJfRMKAQcB3viuISJRbnQTwCPAegKoOUdW6qhqNU8r40JJD8fLoBc0pXyqUR762eyOMCVS5GXJ0lKrWUtUL1PEX0CsX2yUDdwGTgFXA56q6QkRGiMjF7mo9gTUishanQXpkfl+IKVoqlQnj8Qubs3jzfj75Y7PX4RhjMiE5jfolItWAfwE1VfV891LVzqr6v9MRYG7FxsZqXFyc12GYPFBVrvrffP7ccoCp9/egavkIr0MypsQRkYVZjRKamyqm93FKATXd6bXAPYUSmSnRRITn+rfmeEoqz3xv90YYE2iyu5M6rQE7SlU/B1IhverIrk80haJ+VBnuPrsRPy7bwW+rd3kdjjHGR3YliD/cv4dFJBJ3uFERORM44O/ATMlxy1kNaVy1LE98u4IjJ5K9DscY48ouQYj79z6cq48aisgc4ENgqL8DMyVHWEgQ/7q0Ndv2H+W1KWu9DscY48ruPogqIpLWDcY3wE84SeM4cC7wp59jMyVIx+jKDO5Uh/fmxHNJTC1a1argdUjGlHjZlSCCcW5SK4dzD0SIO6+0O8+YQvVw3+ZUKh3Go98sI8XujTDGc9mVIHao6ojTFokp8SqUDuXJfi24e/xiPpwXz/Vd63sdkjElWm7aIIw5bfq1qcFZTarw8qQ17Dhw1OtwjCnRsitBnHPaojDGJSKM7N+K3q/NYPDY36ldqbTXIQUEEejVtCpDzqxLeEiuxusypsByvJO6qLA7qYuX75Zu58O58RSPd2fBHT6ezOqdidSqWIp7ezdhQLtaBAdZId8UXHZ3UluCMKaImL1uLy9NWs2fWw/QuGpZHujTlN4tqiFiicLkX0G72jDGBIBujaOYeGdX3h7SnhRVbvloIZe+PZffNyZ4HZoppixBGFOEiAjnt67B5HvO4sWBrdmx/xiDxv7Ote/9wfJt1sGBKVxWxWRMEXYsKYWP5v3FW9PXs/9IEv3a1uT+3k2IjirjdWimiLA2CGOKuYPHknhn5kbenbWJpJRUruhYh7vPaUw160Ld5MAShDElxO7EY4z+bT2fzt9MSLBwfdf63HZWQyqUDvU6NBOgLEEYU8JsTjjCq1PWMHHpdsqFh3B7z0Zc1yWaUmF2D4U5lSUIY0qoldsP8vLkNfy2ejfVyocz7JwmXB5bm9Dgond9yrGkFBbE72P2+r1s2H2ImDoV6dooija1K9o9IQVgCcKYEu6PTft46ZfVxP31N9GRpbn/vKZc2LoGQQH8xZqSqizfdoDZ6/cyZ/1e4v76mxPJqYQGC7UrlWbT3sMAlIsIoUvDSLo1iqJroyjqR5Wxe0PywBKEMQZVZdqa3bz0yxpW70ykZc3yPNi3GWc1jgqIL1RVJT7hiJMQ1u1l7oa9HDzmDCDVvEZ5ujWKpGujKDrVr0zpsBASDh1n7oYE5qzfy6x1e9m23+m7q2aFCLo2iqJb4yi6NIyiSrlwL19WwLMEYYxJl5KqfLd0G69MXsvWv49yZoPKPNS3Ge3qVjrtsew9dJw5bglhzvqE9C/5WhVLOSWCxlF0aRhJVNnsv+RVlc37jqSXNuasT+DA0SQAmlUv5yQMN7mUCc+uC7qSxxKEMeYfTiSnMv6Pzbz52zr2HjpBn5bVGH5eUxpX899wL0dOJDN/0z7mrNvL7PV7Wb0zEYAKpULp0jAy/Yu8XmTpApVqUlKVldsPpieMP+L3cSI5lZAgoX3dSm4JI5I2tSsWyfaYwmQJwhiTpcPHk3lv9ibGztzI4RPJDGxfm3t6N6FWxVIF3ndySipLtx5gznonISze/DdJKUpYcBCx0ZXo1thJCC1rVvBrQ/OxpBQW/vV3esJYtu0AqlA2PIQzG1ROT0yNqpYNiOq208kShDEmR/sOn+Dt6ev5YN5foHB153rc0bMhkTlU7/hSVTbsOZzeLjB/YwKJx5MRgZY1y6d/EcfWq+zpJbf7j5xg3oaE9IQRn3AEgKrlwtMbu7s2iqJ6heJ/o6ElCGNMrm3ff5Q3fl3HFwu3UDoshJu7N+DG7vUpm0Xd/e6Dx5izYS+z1zkNxjsPHgOgXmTp9ITQuUEklcqEnc6XkSdb9h1h7oa9zF6fwNz1e0k4fAKARlXLpieMMxpUpnxE8bvh0BKEMSbP1u8+xCuT1/Dz8p1ElgnjrrMbceUZdUlKUeZvPPnre+2uQwBUKh1KFzchdGsURZ3KRXOwp9RUZfXOxPRqsfmbEjiWlEpwkNC2doX0hNGubiXCQop++4UlCGNMvi3Zsp+XflnN3A0JVC4TxsGjSSSnKhGhQXSMrpz+hdmiRvmAvq8iv44np7B48/70hLF0y35SFUqFBnN+6+q8OLBNkW7otgRhjCmw2ev2Mv6PzURHOVVH7etWIiK05HXdceBoEvM3JvDb6t18tmALt/ZowCPnN/c6rHzLLkHYBcHGmFzp1ti5+aykq1AqlPNaVue8ltUJChL+O2MjnaIrc07zal6HVuiKbrnIGGM89uRFLWhRozz3f7E0/Sa/4sQShDHG5FNEaDBvDWlPcooy9NNFJKWkeh1SofJrghCRviKyRkTWi8jDmSyvJyJTReRPEZkuIrXd+TEiMk9EVrjLrvBnnMYYk1/1o8rwwsDWLNrsNOYXJ35LECISDLwFnA+0AAaLSIsMq70MfKiqbYARwPPu/CPANaraEugLvC4iFf0VqzHGFMRFbWpy9Zn1eGfWJqas3OV1OIXGnyWITsB6Vd2oqieAz4BLMqzTAvjNfT4tbbmqrlXVde7z7cBuoIofYzXGmAJ5/KLmtKpVnvs/X8KWfUe8DqdQ+DNB1AK2+Exvdef5Wgpc6j4fAJQTkUjfFUSkExAGbMh4ABG5RUTiRCRuz549hRa4McbkVXhIMG9d2R5VuGv8Yk4kF/32CK8bqYcDPURkMdAD2AakpC0UkRrAR8D1qvqPs62qY1U1VlVjq1SxAoYxxlv1Isvw0mVtWLplPy/8XPTbI/yZILYBdXyma7vz0qnqdlW9VFXbAY+58/YDiEh54EfgMVX93Y9xGmNMoTm/dQ2u6xLNe3M2MWnFTq/DKRB/JogFQGMRqS8iYcAg4DvfFUQkSkTSYngEeM+dHwZ8g9OA/aUfYzTGmEL3yAXNaFO7AsO/WFqk2yP8liBUNRm4C5gErAI+V9UVIjJCRC52V+sJrBGRtUA1YKQ7//+As4DrRGSJ+4jxV6zGGFOY0tojAO78dBHHk1Ny2CIwWV9MxhjjJ78s38ltHy/kui7RPH1xS6/DyVR2fTF53UhtjDHFVt9W1bmha33enxvPz8t2eB1OnlmCMMYYP3r4/Ga0rVORB7/8k78SDnsdTp5YgjDGGD8KCwli9OB2iBS99ghLEMYY42d1Kpfmlf+LYfm2g4z8cZXX4eSaJQhjjDkNereoxs3d6/PhvL/44c/tXoeTK5YgjDHmNHmwbzPa1a3Iw18tY9PewG+PsARhjDGnSWhwEKOvbE9IsHDnJ4s4lhTY7RGWIIwx5jSqVbEUr1zelpU7DvLsDyu9DidbliCMMeY0O6d5NW49qwGfzN/MxCXbct7AI5YgjDHGA8P7NKVDvUo8+vUyNu455HU4mbIEYYwxHnDaI9oRFhLEHQHaHmEJwhhjPFKjQilevSKG1TsTeeb7FV6H8w+WIIwxxkO9mlbl9p4NGf/HFr5dHFjtEZYgjDHGY/f3bkKn6Mo8+s0y1u8OnPYISxDGGOOxkOAgRg1uR0RoMHd+soijJwKjPcIShDHGBIDqFSJ47YoY1u5O5KnvlnsdDmAJwhhjAkaPJlW4s2cjPo/bylcLt3odjiUIY4wJJPec25gz6lfm8W+Xs25XoqexWIIwxpgAEhIcxJuD21EmPJg7PlnEkRPJnsViCcIYYwJM1fIRvH5FO9bvOcSTE727P8IShDHGBKBujaMYenZjvly4lS/itngSgyUIY4wJUMPOaUznBpE8MXE5a3ae/vYISxDGGBOggoOENwbHUDY8lDs+Wcjh46e3PcIShDHGBLCq5SIYNSiGTXsP88S3y1HV03ZsSxDGGBPgujSKYtg5Tfh68TY+P43tEZYgjDGmCLjr7EZ0axTFkxNXsGrHwdNyTEsQxhhTBAQHCa9dEUP5UqHc+ckiDp2G9ghLEMYYU0RUKRfOm4PbEZ9wmMe+Web39ghLEMYYU4Sc2SCS+3o3YeKS7Yz/w7/tEZYgjDGmiLmjZyO6N47i6e9XsHK7/9ojLEEYY0wRExQkvH5FDJVKh3Lnp4tIPJbkn+P4Za8uEekrImtEZL2IPJzJ8noiMlVE/hSR6SJS22fZtSKyzn1c6884jTGmqIksG86bg9uzed8RHvnaP+0RfksQIhIMvAWcD7QABotIiwyrvQx8qKptgBHA8+62lYGngDOATsBTIlLJX7EaY0xR1Kl+ZR7o05SGVcrij/bqkMLfZbpOwHpV3QggIp8BlwArfdZpAdznPp8GfOs+7wNMUdV97rZTgL7AeD/Ga4wxRc5tPRr6bd/+rGKqBfg2sW915/laClzqPh8AlBORyFxui4jcIiJxIhK3Z8+eQgvcGGOM943Uw4EeIrIY6AFsA3I9WreqjlXVWFWNrVKlir9iNMaYEsmfVUzbgDo+07XdeelUdTtuCUJEygIDVXW/iGwDembYdrofYzXGGJOBP0sQC4DGIlJfRMKAQcB3viuISJSIpMXwCPCe+3wScJ6IVHIbp89z5xljjDlN/JYgVDUZuAvni30V8LmqrhCRESJysbtaT2CNiKwFqgEj3W33Ac/iJJkFwIi0BmtjjDGnh5zOvsX9KTY2VuPi4rwOwxhjihQRWaiqsZkt87qR2hhjTICyBGGMMSZTxaaKSUT2AH95HUcBRQF7vQ4igNj5OJWdj5PsXJyqIOejnqpmep9AsUkQxYGIxGVVF1gS2fk4lZ2Pk+xcnMpf58OqmIwxxmTKEoQxxphMWYIILGO9DiDA2Pk4lZ2Pk+xcnMov58PaIIwxxmTKShDGGGMyZQnCGGNMpixBBAARqSMi00RkpYisEJFhXsfkNREJFpHFIvKD17F4TUQqisiXIrJaRFaJSGevY/KSiNzrfk6Wi8h4EYnwOqbTSUTeE5HdIrLcZ15lEZniDtE8pbBG4LQEERiSgftVtQVwJnBnJsOzljTDcDp5NPAG8IuqNgPaUoLPi4jUAu4GYlW1FRCM01N0SfI+zgibvh4GpqpqY2CqO11gliACgKruUNVF7vNEnC+Af4ygV1KISG3gQuBdr2PxmohUAM4C/gegqidUdb+nQXkvBCglIiFAaWC7x/GcVqo6E8jYu/UlwAfu8w+A/oVxLEsQAUZEooF2wHyPQ/HS68CDQKrHcQSC+sAeYJxb5fauiJTxOiivqOo24GVgM7ADOKCqk72NKiBUU9Ud7vOdOMMnFJgliADijqr3FXCPqh70Oh4viMhFwG5VXeh1LAEiBGgPvK2q7YDDFFL1QVHk1q1fgpM4awJlROQqb6MKLOrcu1Ao9y9YgggQIhKKkxw+UdWvvY7HQ12Bi0UkHvgMOFtEPvY2JE9tBbaqalqJ8kuchFFSnQtsUtU9qpoEfA108TimQLBLRGoAuH93F8ZOLUEEABERnDrmVar6qtfxeElVH1HV2qoajdP4+JuqlthfiKq6E9giIk3dWecAKz0MyWubgTNFpLT7uTmHEtxo7+M74Fr3+bXAxMLYqSWIwNAVuBrn1/IS93GB10GZgDEU+ERE/gRigH95G4533JLUl8AiYBnOd1iJ6nZDRMYD84CmIrJVRG4EXgB6i8g6nFLWC4VyLOtqwxhjTGasBGGMMSZTliCMMcZkyhKEMcaYTFmCMMYYkylLEMYYYzJlCcIEHBFREXnFZ3q4iDxdSPt+X0QuK4x95XCcy92eV6dlmB/tvr6hPvNGi8h1OezvNhG5Jod1rhOR0VksO5SH8I0BLEGYwHQcuFREorwOxJfbOVxu3QjcrKq9Mlm2GxgmImG53ZmqjlHVD/Nw/EKTx9dtihFLECYQJePc/HRvxgUZSwBpv4xFpKeIzBCRiSKyUUReEJEhIvKHiCwTkYY+uzlXROJEZK3b91Pa+BP/FpEFIvKniNzqs99ZIvIdmdzBLCKD3f0vF5EX3XlPAt2A/4nIvzN5fXtwumS+NuMCEWkoIr+IyEL3uM3c+U+LyHD3eUc3xiVuzMt9dlHT3X6diLyUYd+vueMoTBWRKu68GBH53d3fN2njCIjIdBF5XUTicJLZ5e5rXCoiMzN5TaYYsgRhAtVbwBC3u+vcagvcBjTHuTO9iap2wuk2fKjPetFAJ5wuxce4A87ciNMzaEegI3CziNR3128PDFPVJr4HE5GawIvA2Th3OHcUkf6qOgKIA4ao6gNZxPoiMFxEgjPMHwsMVdUOwHDgP5lsOw64VVVjgJQMy2KAK4DWwBUiUsedXwaIU9WWwAzgKXf+h8BDqtoG587kp3z2Faaqsar6CvAk0EdV2wIXZ/GaTDFjCcIEJLc32w9xBofJrQXu2BrHgQ1AWjfQy3CSQprPVTVVVdcBG4FmwHnANSKyBKer9Uigsbv+H6q6KZPjdQSmux3HJQOf4IzdkJvXt9E9zpVp89zefLsAX7hx/Beo4budiFQEyqnqPHfWpxl2PVVVD6jqMZwSTz13fiowwX3+MdDNTb4VVXWGO/+DDPFP8Hk+B3hfRG7GGaTHlABWt2gC2es4fe6M85mXjPvDRkSCAN96/OM+z1N9plM59b2esX8ZBQTnl/sk3wUi0hOni21/+BdOv0JpX9BBwH63ZJBfvucghaw/47npYyf9davqbSJyBk6pa6GIdFDVhPyHaYoCK0GYgKWq+4DPcap/0sQDHdznFwOh+dj15SIS5LZLNADWAJOA291u1xGRJpLzwDx/AD1EJMqtKhrMyS/7HKnqapxf+f3c6YPAJhG53I1BRKRthm32A4nulzXkfrjNICCt7eZKYLaqHgD+FpHu7vyrs4pfRBqq6nxVfRKnDaVOZuuZ4sVKECbQvQLc5TP9DjBRRJYCv5C/X/ebcb7cywO3qeoxEXkXpxpqkYgIzpdg/+x2oqo7RORhYBpOCeRHVc1rN8sjgcU+00OAt0XkcZzk9xmwNMM2NwLviEgqzhf6gVwc5zDQyd3vbpx2CnAayseISGmc6rbrs9j+3yLSGOd1Ts0kJlMMWW+uxhQxIlJWVdOu3noYqKGqwzwOyxRDVoIwpui5UEQewfn8/gVc5204priyEoQxxphMWSO1McaYTFmCMMYYkylLEMYYYzJlCcIYY0ymLEEYY4zJ1P8Dez9E5jbiacIAAAAASUVORK5CYII=\n"},"metadata":{"needs_background":"light"}}]},{"cell_type":"markdown","source":"## t-SNE ","metadata":{}},{"cell_type":"code","source":"####### t-SNE Plot Generation\n###### Model Creation\n#with tpu_strategy.scope():          \n#    tsne_model = tf.keras.models.Model(inputs=model.input,outputs=model.layers[-4].output)\n#    tsne_model.compile(tf.keras.optimizers.Adam(lr=1e-4),loss='categorical_crossentropy',metrics=['accuracy'])\n#tsne_model.summary()\n\n###### Model Predicted\n#embeddings_final = tsne_model.predict((X_dev,y_exp))\n\n###### t-SNE plot plotting\n##### Reduction to Lower Dimensions\ntsne_X_dev = TSNE(n_components=2,perplexity=30,learning_rate=10,n_iter=2000,n_iter_without_progress=50).fit_transform(Test_Embeddings)\n\n##### Plotting\nj = 0 # Index for rotating legend\nplt.rcParams[\"figure.figsize\"] = [12,8]\nmStyles = [\".\",\",\",\"o\",\"v\",\"^\",\"<\",\">\",\"1\",\"2\",\"3\",\"4\",\"8\",\"s\",\"p\",\"P\",\"*\",\"h\",\"H\",\"+\",\"x\",\"X\",\"D\",\"d\",\"|\",\"_\",0,1,2,3,4,5,6,7,8,9,10,11,0,1,2,3,4,5,6,7,8,9,10]\nfor idx,color_index,marker_type in zip(list(np.arange(89)),sns.color_palette('muted',47),mStyles):\n    plt.scatter(tsne_X_dev[y_dev == idx, 0], tsne_X_dev[y_dev == idx, 1],marker=marker_type)\n#plt.legend([str(j) for j in range(89)])\nplt.savefig('tsne_plot_5000_iters.png')\nplt.savefig('tsne_plot_5000_iters.pdf')\nplt.show()","metadata":{"execution":{"iopub.status.busy":"2021-07-29T11:29:20.256645Z","iopub.execute_input":"2021-07-29T11:29:20.257050Z","iopub.status.idle":"2021-07-29T11:29:22.281488Z","shell.execute_reply.started":"2021-07-29T11:29:20.257019Z","shell.execute_reply":"2021-07-29T11:29:22.280399Z"},"trusted":true},"execution_count":25,"outputs":[{"output_type":"display_data","data":{"text/plain":"<Figure size 864x576 with 1 Axes>","image/png":"iVBORw0KGgoAAAANSUhEUgAAAscAAAHSCAYAAAAABWabAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8rg+JYAAAACXBIWXMAAAsTAAALEwEAmpwYAAA93klEQVR4nO3deZhcZZ33/8/d1V2dXrI06STdkLCEAIILkTQQFmVRo2OeRAUVEZcZdTCJXmTUn1zo+MzkekR/AjI+wfERGHREfgh4gYw44XFnT0LSkSBLDISQkITukKXJ0um16v790VWd6upaTi2nzjl13q/rytXdp05V3T1tJ5/58r2/t7HWCgAAAIBU4/UCAAAAAL8gHAMAAAAJhGMAAAAggXAMAAAAJBCOAQAAgATCMQAAAJBQ6/UCUrW2ttoTTzzR62UAAACgym3YsGGvtXZa+nVfheMTTzxRnZ2dXi8DAAAAVc4Ysz3TddoqAAAAgATCMQAAAJBAOAYAAAASCMcAAABAAuEYAAAASCAcAwAAAAllCcfGmJ8aY94wxjyfcm2FMWaXMWZj4s8Hy/FeAAAAgFvKVTn+maQPZLj+A2vt3MSfh8v0XgAAAIAryhKOrbWPS9pfjtcCAAAAvOJ2z/GXjTF/TbRdtLj8XgAAAEBJ3AzHP5Z0sqS5krok3ZzpJmPM1caYTmNM5549e1xcDgAAAJCba+HYWrvbWhuz1sYl/Yekc7Lcd7u1tsNa2zFt2jS3lgMAAADk5Vo4Nsa0p3z5EUnPZ7sXAAAA8IPacryIMeYeSRdLajXG7JT0r5IuNsbMlWQlbZP0xXK8FwAAAOCWsoRja+2VGS7/pByvDQAAAFQKJ+QBIWWtVc+99+ml885Xz733yVrr9ZIAAPAc4RgIodiBA9qxZIl233CDYj092n3DDdqxZIliBw54vTQAADxFOAZCaMfSZepdvUa2r0+SZPv61Lt6jXYsXebxygAA8BbhGAih6EknSrHY2IuxmKKzZ3uyHgAA/IJwDITQ5EWLVdPQMOZaTUODJi9alPe59CoDAKoZ4RgIocaOeZIkE42O/km9no3bvcoEbwCA14yf/vHp6OiwnZ2dXi8DCIUjGzao/8VNo19POON0Nc7LHY63ffIq9T33nDQ0dPRiXZ0a3v52nfiLu0taT+zAAe269lodWbdetq9PpqFBjeecreNuvFGRyZNLem0AANIZYzZYazvSr5dlzjGA4GmcNy9vGE4XPelE9W3cOPZikb3K1lq9ed8vtWflSk1bvlwHHvq1+p57fjR4p24SLDV4AwDgFOEYgGOTFy3Wod/+TvHe3tFrTnuVU6VXiXffcIMikydJw8NpN7JJEABQWYRjAI6l9ipnuu7UjqXLxrRn2L4+DQ8OSsZIKa1exQRvAABKQTgG4JiprdWs228b16tsIpEx96W3TEy54uMyxow+nrE9Ix6XIhGZ2rF/LRUavAEAKAXhGEBB8vUqZ2qZOPTIn3XsDTfo0G9/pz0rV2rS4sUyEybIHjky+ryaxka1/tM/yaS8VqbgDQCAm5hWAaAk6VXiN3/9a/U///zYiRa1tappbJQdGhqdRGH7+0daKCIRKRJRTV2dTl33NGEYAFARTKsAUHaON9YNDyt+6NBoP3Hy2GpJUk2NosfP0oyvf51gDADwHIeAACjajqXL1Lt6zWjYtX19Gt67b2RjXaraHP9/+NCQBre/pr233e7iSgEAcIZwDCAjJ6fVRU86UYrFxl6Mx6WamjGn75naWpnGxuxvxsg2AIBP0FYBhFC+aRLZNtWln1aXbe7xtH9aLqVsras/7VTtXLpMikZl4/FxbReMbAMA+AUb8oCQcXJMs9Njou3wsF46d75syn0my8a65HHVNh7Tnpv/TTYel6mpyfkcAADcwoY8AJIyH8CRfkyz02Oinc49lsaOgGt429scPQcAgEojHAMh4yT4FnJMdL65x5kU8xwAACqBDXlAyExetFg1DQ1jrqUH39RjopN/Uq8DAFCtqBwDIZMafDNdlwprl0iXb7MfAAB+xoY8IISSm+OSJpxxelnaHJxs9gMAwA+ybcgjHAMoG6dTLgAA8Fq2cEzPMYCyyXgoCAd8AAAChHAMoGycbPYDAMDP2JAHoGycbPYDAMDPCMdAyJVzukQpUy4AAPADNuQBIcZ0CQBAWLEhD8A4O5YuU+/qNbJ9fZLGHiUNAEAYEY6BEGO6BAAAYxGOgRBjugQAAGOxIQ8IMaZLAAAwFuEYCDGmSwAAMBbhGAi5xnnz1DiPSjEAABI9xwAAAMAowjEAAACQQDgGAAAAEgjHAAAAQALhGAAAAEggHAMAAAAJhGMAAAAggXAMAAAAJBCOAQAAgATCMQAAAJBAOAYga6167r1PL513vnruvU/WWq+XBACAJwjHQMjFDhzQjiVLtPuGGxTr6dHuG27QjiVLFDtwwOulAQBQcYRjIOR2LF2m3tVrZPv6JEm2r0+9q9dox9JlHq8MAIDKIxwDIfL6d59Wz4NbFDs4OHotetKJUiw29sZYTNHZsyu7OAAAfIBwDIRI/OCgeju71XXj+tGQPHnRYtU0NIy5r6ahQZMXLfJolQAAeKfW6wUAqLCYlWTV29mt3g271Ti3VdZamWh0zG2NHfO8WR8AAB4iHANhlQjJRza8oaaLv6Kmdx59aMIZp8tEIp4tDQAArxCOgbCKGMkYNXXM0KT3nKvIxGj+5wAAUOUIx0DYjAnFxxOKAQBIQTgGQqRmUlQNZ0wlFAMAkAXhGAiRY795rtdLAADA1xjlBgAAACQQjgEAAIAEwjEAAACQQDgGAAAAEgjHAAAAQALhGAAAAEggHAMAAAAJZQnHxpifGmPeMMY8n3LtGGPMH4wxLyc+tpTjvQAAAAC3lKty/DNJH0i7dp2kP1lrT5H0p8TXAAAAgG+VJRxbax+XtD/t8ock3Zn4/E5JHy7HewEAAABucbPneIa1tivxebekGZluMsZcbYzpNMZ07tmzx8XlAAAAALlVZEOetdZKslkeu91a22Gt7Zg2bVollgMAAABk5GY43m2MaZekxMc3XHwvAEV4/btPq+fBLYodHPR6KQAA+IKb4fghSZ9NfP5ZSb928b0AFCF+cFC9nd3qunE9IRkAAJVvlNs9ktZIOs0Ys9MY83lJ35P0PmPMy5Lem/gagN/ErDQczxqSC60uU40GAARZbTlexFp7ZZaH3lOO1wdQAbGRrQG967rU/3KP2q89W9LR6nLvht1qmjdDk95zvCKTollfptD7pZFA3XD6VEf3AgDgprKEYwBVIGIkY9TUMRJox0gGZ6eht8D7iwnUAAC4gXAMhF1aKI5MzBFKs1SXy3J/oQEcAAAXEI6BEKuZFFXDGVPzh+KkXNXlctwvFR7AAQAoI8IxEGLHfvNcZzcWUl0u5v4czwUAoJIIxwByKrS6XHA1OqmUQA0AQJkQjgHk5Li6nCrrmZiZFR2oK8Raqzfv+6X2rFypacuXa8oVH5cxxutlAQBcYEZOdvaHjo4O29nZ6fUyAJRg53VPHK0CV8GmutiBA9p17bU6sm69bF+fTEODGs85W8fdeKMikyd7vTwAQJGMMRustR3p1908IQ9AFct52Eeeg0WCZMfSZepdvUa2r0+SZPv61Lt6jXYsXebxygAAbiAcAyFR7pPrHB09nQzJ67r0xq3PluV9Ky160olSLDb2Yiym6OzZnqwHAOAueo6BkMh30EamU+rynlyXYTbxGFUweWLyosU69NvfKd7bO3qtpqFBkxct8nBVAAC3EI6BMMlx0Eam8Oz45LqU2cSSqmryRGPHPEmSiUYzXgcAVBfCMRBG2Q7aSAvPma4lQ/IYKWG474W9anhra+BDcZKprdWs229T/4ubRq9NOON0mUjEw1UBANxCOAbCKF+7QyzDLLa0QJ3pdSITo2r58By3V19xjfPmqXEelWIACAPCMRAmTtsdEvdpOJ71ubt/+MyY2cR5+5MBAAgAwjEQEo4O2kgLwF3feTproE4/HMRxfzLgstuXP6ahgZjq6iO6euVFXi8HQMAQjoGQyHfSXabwXPDJdcnWi6e71LuuSw1zp2vK351ESEZFDQ3ExnwEgEJwQh6Asth53ROZH6iPqP1rHQRklCxfRTj5uKkxsnE7+rHQCjKVZyAcsp2QR+UYCDnXe4WH4+q6cT2tFihZvopw8rqN2zEfU+93EnypPAPhRjgGQi5Tr/Duf3+muMCcaSNftrFxgEOZKsI/WvLncQG3rj6StXKclCv4On0fANWNcAxg/Hzj4bh613cVtLkutT+56ztPH32gCk7Jg7ecVIQljQbYHy358+h9X7r1UknOgq/T98mHtgwg2AjHAI5KnW8clxSP551AkWzLmPHld459rIpOyYO/ZKoIp0pWkDNVjHMF31yV50ICL20ZQLARjgEcVURbRKa2jIKnXAAZpFd7k1Irwpk4qdZmCti5Ks/Ja7kCL20ZQHUgHAPIPN84y2MZZTpi+lKCMUqTXu11Ir3CW0zATq08FxJ4y9WWAcBbhGMg5LJWeYtpi2DzHcrIyQa7dOktDYUG7PRwnVpFTv2YKfAWs14A/kM4LlLvgQH96qYNuuzr89Q0ub7oewCvZTocpOi2CDbfoYxytTmky1bhTXIaWNNDdSGBt5D1AvAvwnGROle9qoP7+tX58DZddOVpOe9Z8+Ar6tryJiEZgZHvNL1x2HwHF2XaYJcuW0uDJH3p1kvzBtZc7RNOnl/oegH4F+G4CL0HBrRpTbdkpU2ru9TxwRPHhd7Ue15a1y0b15ggTVUZ1YLNd3Cbk81s6RXebI/nqxhna58oJPCy+Q4INsKxCg+qnateHfMXaKbq8dh7Rq6lBmknlWcgCAquMpfJqq2rtPIvK9Xd2622pjYtP2u5Fs5e6Mla3Bam77VY6S0NSekV4GzytU8QeIHwIBzLWYtEUrIiHI+NBN94zI6rHqffkxSPxdX58DZ1fPDEvJVnoJqU+4jqVVtXacXqFeqP9UuSunq7tGL1CkkKTGjMFHglZbwW9O/VS04nRtAvDCCpxusFeC29RaL3wEDO+1MrwknJ6nGue0buG3mPNQ++Mq7y3HtgQHd9a3Xe9weCKDkLuevG9ep5cItiBwdLer2Vf1k5GhaT+mP9WvmXlSW9bqUkw31Xb5esrLp6u/StJ7+l//nU/xxzbcXqFfreuu8F+nuttGSl19SYMR+d9v8m76NfGAiv0FeOnbRIpHr1r/syVIStXn127+jzMt1z9N74aA9y8rmbVndpaGCYNgtUt0yzkIusJHf3dhd03W8yhfthOzx6OGFSf6x/3H1JQfleK63UCjDtEwBCXTnO1iKRq3r799+7QF+69VL9/Q0XKFI38n++SF2NPvaNjnH3NE0Z3yph40d7kI9es9r89G7H1Wsg0GJWGo6rd12X3rj12aJeoq2praDrflOOYBuU79UrVIABFCvUleNcLRL5qrdOKs5//70Lxj3vZ9c9pd43x4bf1Cqz0/cHAqsMs5CXn7V8TB+uJE2ITBjt0fW7tqY2dfV2Obp3cnSyBmID4yrI7575bjeWVjWoAPtX98CQ2urrvF4GkFWow7GTFolMnGzKyyY9MI/0Gq9RbChe8GsBgVLGWcjJjWhBmuCQugFvcv1k1ZrakVaKhFpTK2OMhuJDo9cmRCboG+d+Q8+88Yzu23zfmNf79ZZf653T3+nr7xlIt7m3Xws6N+vGU2fpivZjvF4OkJGx1vmZ9W7r6OiwnZ2dXi8jr8d+8Te9+FTXmGBdEzE648JjC6r49h4Y0N3/ulbDg7ExrRbFvBbgZ69/9+lQz0JOn64hSXU1dWqsbdTBwYM5p1UsnL1QC+5fkLHS3N7Urt9/9PcV+z4QTqVWel/rG9CO/kFZSddsek19sbieOPd0tUZDXZ+DDxhjNlhrO9Kv87/MIjipOOeanZx8rH3OZA31jx8v5KR6DQSJV7OQKyXfHOJMG/CG4kNqrGvUk1c+OeZ6pkpw0DcgIrjKUem9r3u/bt62e/TrC6c0E4zha/yvswjJ1ojeAwP62XVPjewwNxqzKS/X7OTkYwf3jfxjGamr0aevP482CiCAnMxcLjXcZutRZlMe3JBe6W2sqdF7pk4q+vWuaDtG509plpW0fNNrev5wn/YMDmlalL5j+FOop1WUas2vthwdvWSlNf/1iqTcs5NTH0s+N31OMoDgcDJzudTpGsvPWq4JkQljrmXbgLhq6yotuH+B3nHnO7Tg/gVatXWVo/cAku7r3q/LN76ij258Ra8PDOmtzQ0lVXqPb6jXBS0TdWHLRP3izJN1JBbXI/sPlXHFQHkRjovUe2BAm9ftHnNt89pu9R4YyDjJIinThAwnI+QA+EsyhGabOpFaFS4k3GaycPZCrTh/hdqb2mVk1N7UrhXnrxjXgpHpcJEVq1cQkFGQK9qO0QNzT9b9c0/WcfV1o5Xe8rHy0XYnYBzaKoo0pmqcZKUnfvmStqX0JKdOn5CU8VhpiRFuQJBk2mCXLrUqXI7pGgtnL8x7f64qNlMt4NTxDfU6vmGkze8XZ56sBZ2b9cj+Q/p4W3E9x+ltGk2RSEltGoDbCMdFeuWZPRmvb31mj4wxY66NVo+tzXistMQmPCBIMoXQVJmqwk7CbanYuIdyO61pgp6ef0ZJ0yqcbMhj9jH8hHBcpPrGOg0PZmiDsFI8Q9vEq8/uHf08XdOU+owHhgDwp1xhs72p3bOZy2zcgxuchtZsATffhjxmH8NvCMdFIswiqAaO9OoX3/p/9Mnrv6/6xiavlxNI2UKo13OHg35yIPwvWwDOFXAztWnc392jd0xsKNtEDKCc2JAHhMzWv6zX/l07tPUZ5wfuDBzp1X9+dakGjvS6uLLgKHWDnVucbtwDirG5t1/nrn1R93XtlzTSS/xUzyE92XNIVz77ymjA7R7Ivnkv2aZxOBYr60QMoJz4XyIQEqtW3qhXNjyt2PDIkcW//dG/6Q+33aKT552rhcuvzfnc1EB9+gUXjXksjJVoPx9fXYneZoRHrpnHmXqJ9w0N522RaKuvY/YxfI3jo4GQ6Ol+Xf9147d1cM9uDQ8OqjYa1aRpM/SRa/9FU9raMz4nNVDHYzHVRCKK1NaOCdSbnnxUD//w+/rgNV8fF5wBBNtNr3aNC8D3v3OOpKPBeffAkP5lyy71xayaa2s0GLcFHQ+dbMm46bRZRU/EAIqR7fhowjEQIi+tfVKrbrlJkdo6xYaHtPCar+vU+RdmvT9XoH7qvrvyBmcAwZZaOV6+6TX1xuJ64ty3jKnw5grQTjGtAl7IFo7pOQZCZPOaJ1Qbrdf5H/ukaqP12rzmyZz3t7Qdqws+fpXisZjq6icoHovpgo9fpSlt7Tr/ik9pYut01UQikqSaSEQTW6frgis+XYlvBYDLugeGxp1u1xcff7pd8tCQ2996QtGHhhCM4SeEYyBEzl50uT73v29Tx6LL9Ln/fZvOXnx53udkC9S5gjOAYEvffCcd3UyX3vqQDNCLp7dkDdBAkLAhDwiRtjmnjn7eNKVFTVNa8j7n7EWX69J/WKKmKS06/V2X6NC+vaOPJYPzeZd/QmseuFeb1zyZs00DgH/l2nyXlK/CW45DQwCvEY4B5NRy7HGj0yjSA3Wu4AwgGJL9vk5OsnMiUzCmpxhBQlsFgJxyzUVum3PqaFhumtKitpNPqfTyAJQgtX0i2Tt8/9yTi+4dTkqddZypRQPwM8IxEBKFHuSxauWNuuUzl+u3/+cHkkbmIt/ymcu1auWNRb8mAO9lO7zDyeY7Jzb39uucNS/o+i2vj3sPIAgIx0BIFHoynpNpFE5fkxAN+Md93fvznk6XbfNdNumBu0ZG/77jDU7AQyAx5xiock4O8sh2yt1La5/Uf6+8SdbGZUyN/sfykbnITl4zFQeFAP7hZHZxodJnHXdMbNQ3Tm4v63sA5cacYyCkSqkAb17zhCK1EclaRWojo2PcnM44dtKaAaCyytU+kSq9X3lL34BObZpQ1vcAKoXKMRAC2U7Gy1UBlqSX169RPBaTjcdlampUE4nolLPP08Ll1zo6ba+YI6sBVFahkyTy3Z/pOGimVcCPqBwDIZbtII9cFeDzr/iUJk9vU6R2pE8wUlurydPbdPaHLtd/fnWpXnzikbyn7XFQCOB/hYRWJ5MnMvUrE4wRJHTHAyGQbR5xMryuuuUm1dVPUGx4aEx4zfTYvp07tH/XDp1+wUV63z9+Oe+MYw4KAYLNyeEg6QjDCDLaKoCQ+80P/l9te/aZ0fB64plnadFXrhv32BP33ClrrYwxjjbhJXVveUkTW6epaUqLet/s0aF9e5mHDARI+ma7C6c06/53zvFwRUB5ZGurIBwDIZcrvKY+9vpLf9OqW27SkQM99A8DIeLGdAvAD+g5BpBRrlPuUh879tS36KJP/QP9w0DIuDHdAvAz18OxMWabMeY5Y8xGYwxlYSDAsm3sAxB8qUc+Z1Po4SBAEFVqQ94l1trMu3UABEa2jX0Agi05fu3GU2fpivbxwTd1FBub7VDtmFYBwLG2OaeOft40pWW05QJA8DidQpEvOAPVphLh2Er6vTHGSrrNWnt7Bd4TAADkcF/3/nFTKFqjI7GgmPFtQLWoRDi+0Fq7yxgzXdIfjDF/s9Y+nnzQGHO1pKsl6fjjj6/AcgAAwBVtx+j8Kc2jUyieP9ynPYNDmhatyxmcgWrn+oY8a+2uxMc3JD0o6Zy0x2+31nZYazumTZvm9nIAAIByT6G4ou0YPTD3ZN0/92QdV183GpyBMHA1HBtjmowxE5OfS1og6Xk33xMAABQmfQoF49sQZm7/N5IZkh40xiTf6xfW2t+6/J4AAKBA2aZQJIMzUyoQFq6GY2vtVklnuvkeAADAXQRjhAkn5AEAAAAJhGMAAAAggbksAAAE2MsXXazh3bvHXa+dMUOnPPZo5RcEBByVYwAAAqz5kkukurSe4Lo6NV96qTcLAgKOcAwAQIC1LlsqUzP2n3NTU6Npy5Z6tCIg2AjHAAAEWN306Zr8kY8crR7X1WnyZZeploO1gKIQjgEACLjU6jFVY6A0hGMAAAJutHpsDFVjoESEYwAAqkDrsqWqmzmTqjFQIka5AQBQBeqmT9ecP/ze62UAgUc4BgDAx5hjDFQWbRUAAPgYc4yByiIcAwDgY8wxBiqLcAwAgI8xxxioLMIxAAA+xxxjoHIIxwAA+BxzjIHKIRwDABAAzDEGKoNRbgAABABzjIHKoHIMAAAAJBCOAQAAgATCMQAAAJBAzzEAAB7haGjAf6gcAwDgEY6GBvyHcAwAgEc4GhrwH8IxAAAe4WhowH8IxwAAeIijoQF/IRwDAOAhjoYG/IVpFQCA0PDrdIjWZUvV+9RTVI0BHyAcAwBCI3bwYEHXK4WjoQH/oK0CABAaExcsyHz9/e+v8EoA+BWVYwBAaEz/2ld18De/keLxoxdrajTja1/N+1y/tmQAKC8qxwCA0KibPl2TFi0ac23S4sWONsFxYAcQDoRjAECoTP/aV6XkwRsOq8YSB3YAYUE4BgCESmr12GnVOPk8DuwAqh/hGAAQOtO/9lXVzZrluGqcxIEdQPVjQx4AoOpl20z36kc/VtBmumT1+M377qNqDFQpKscAgKpXzs10rcuWqm7mTKrGQJWicgwAqHqty5bqwIMPyqZcS2+LcDqqjQM7gOpG5RgAUPWcbKZjVBsAiXAMAAiJfJvpGNUGQCIcAwBCYrR6bEzGzXSMagMgScZam/+uCuno6LCdnZ1eLwMAUGWc9hMPvfGGXnnfAtmBAZn6es354x8Ix0CVMsZssNZ2pF9nQx4AoOo1X3KJ3nzgAWlo6OjFDP3EmUa1OQ3WAKoDbRUAgKpXSD9x+qg2NuoB4UI4BgBUvXH9xJLswIBefte7tektp2vTW07XyxddPHrvnD/8frSdgo16QLjQVgEACIVMs45H5agEj7ZaJNsyUjbq0XIBVB8qxwCAUEidVqG0SrCGhvTmPfeMqSCnyjYGjpYLoPoQjgEAoZHsJ560aNH4gCxlDbbZxsDRcgFUH8IxACA0kv3E07/2VZna8Z2FuYJt+ka95OsxGxmoLoRjAEDo1E2frsmXXTbyhTGJi7mDbfpGvaR8J+8BCBY25AEAQql12VIdfvxxDe/dKw0OFh1sM81GTsfGPSA4qBwDAEKpbvp0nfLnP2nKZZdlPVLaqUwtF6nYuAcEB8dHAwBCbeiNN7T9qk/pxF/cPSYcl7Pam3osdRLHUwPeynZ8NJVjAECoZeslLme1l417QHAQjgEAyKDcY9rYuAcEAxvyAABIyNZKIankaq+TjXsAvEflGACAhIytFAnlqPbm27gHwHuEYwAAEjK1UigSkaSyVHuz9TcD8A/CMQAACZk2zk1a9D9UN2sW1V4gJOg5BgAgReuypTrw4IOyGmmlmPG1r1HpBUKEyjEAAClGq8clHgwCIJgIxwAApGHjHBBetFUAAJAmuXEOQPi4Xjk2xnzAGLPZGLPFGHOd2+8HAAAAFMvVcGyMiUj6kaS/k3SGpCuNMWe4+Z4AAABAsdyuHJ8jaYu1dqu1dlDSvZI+5PJ7AgAAAEVxOxwfJ2lHytc7E9cAAAAA3/F8Q54x5mpJV0vS8ccf7/FqAACQXr7oYg3v3j3ueu2MGTrlsUcrvyAAFeN25XiXpFkpX89MXBtlrb3dWtthre2YxixJAIAPNF9yydFT8pLq6tR86aXeLAhAxbgdjtdLOsUYc5IxJirpE5Iecvk9USEbN27U4cOHJUmHDx/Wxo0bvV0QAJRJ67KlMjVj/4k0NTXMPQZCwNW2CmvtsDHmy5J+Jyki6afW2hfcfE84t3HjRs2ZM0fNzc06fPiwtmzZorlz5zp67sGDB/Wb3/xGdXV1amtrU3d3t4aGhjR79mxNmjTJ3YUDgMuSp+S9+cAD0tCQVFfHaXlASLjec2ytfVjSw26/DwpTaridNGmSlixZorvuukvbtm3TpEmT9PnPf76gYFxKOAcAt7UuW6oDDz4oK6rGQJh4viEP3ihHuJ02bZo+/OEP6+c//7k+/OEPq5CecSrPAPxutHp8331UjYEQcf2EPPhXMtxKKjjcJhljxnx0KhnOo9Gotm3bpmg0qiVLlhCMAfhK67Klqps5k6oxECKE4xDJtIGu2HCbNGXKFJ122mmKRqNjXteJcoRzAHBT3fTpmvOH31M1BkKEtoqQSG1jaG5u1qFDhzQ8PKwrrrhCp512mqZMmVLU60YiEW3ZskXbt28vqj2i1HAOAABQTlSOQyLZxlBbW6u9e/dqYGBAbW1t+tWvfqUtW7YoEokUNZqt1PaIKVOm6KKLLio6nAMAAJQTleMQmTZtmi677DL9/Oc/V2Njo3bt2jW6EU9S0RvkStmY19LSoksuuaSk7wsAAKBcqByHTLJ94cILL5R0tNe31Aow7REAAKAaEI5DJtnGMHHiREljw2wpG+SC2h6xYXuPfvTIFm3Y3uP1UgAAgA/QVhEyyTaGnp6ejGE2WwU434EdQWyP2LC9R1fdsVaDw3FFa2t09xfma94JLV4vCwAAeIhwHFLZwmymCnC1Htixdus+DQ7HFbfS4HBca7fuKyocb9jeo7Vb92n+7KmEawAAAo62ipBwOokiGZpbWo6GvGo9sKOlMaq4Hfk8bke+LlSy+nzz7zfrqjvW0p4BVJG+vp3aunWl+vp2er0UABVE5TgEylH5LXYiRb52DC/1HBkc/dykfe1UavV5qITqMwD39PXtVFfXA2pvv1wNDTMdP6+/f6de3XaLWlrOLeh5AIKNcBwCycrvXXfdpW3bto2Obyu08lvoRAq/t2OkVoqtiqscz589VdHaGg0Nx1VXW6P5s6eWcYUAyqHYkGttfMxHAOFAW0VIlOOo5kInUvitHSN9MkXPkUHVJHJ+jSmucjzvhBbd/YX5+uqC09jQB/hUMSH3cO/LenHTtZKkFzddq8O9L7uyNgD+QzgOkVJnEW/fvl1nn322WlpaHJ+gV45QXg6ZeoOTVd+IkaIlVH3nndCiL10yh2AM+FAxIXdgYLfWr1+sWOyIWlrOUyx2ROvXL9bAwG63lwvAB2irCJFSZhGX0iLhhwNCMvUGf+mSObr7C/MLnjTBdAogGJIht6amQS0t5+nQoRe1fv1inX/eo6qvn5H1efX1M/SW067X1KkXKRpt1eDgXu3b93jO5wCoHsZa6/UaRnV0dNjOzk6vl4Es9uzZo7vuuksHDx7UpEmT9OlPf9pRJbinp0cbN27U3Llzx0zBqKRk5TjZG1xsCwSzkYFg6ep6YFzIbW+/zOtlAfABY8wGa21H+nXaKuBYrhaJXKPiMo2Hq7Ry9QZnqkAD8K/29ssVjbZKkqLRVoIxgLxoq0BBMrVI+H0qRdK8E1pKrvIynQIAgOpG5RgFydS37LepFG5iOkX4rNq6SgvuX6B33PkOLbh/gVZtXeX1kpBBOQ7s4NAPABLhGAXK1iLhl6kUlcB0ivBYtXWVVqxeoa7eLllZdfV2acXqFQRkH0rOMi4l3CZfo78/8/MJz0A4EI5RNn6YSgGU08q/rFR/rH/Mtf5Yv1b+ZaVHK0I2yRnG3bt/lTXcOn2NbPOQDxx4Rq9uu0UHDjxT3CIBBALhGGVTyqg4P0k/LCTbNVS/7t7ugq7DG6mzjCXpSN+Okl4j2zzkgYE3xnwEUJ3YkIeySbZcBFmmUW2SGN8WUm1Nberq7cp4Hf6QnGVsTFTGRGXtoP72t+vU0DBLx7TML+g1cs1DPtz7sra/dqskaftrt2pq67vV3HSKa98XAO8QjoEU2Ua1pV8jHIfD8rOWa8XqFWNaKyZEJmj5Wcs9XBVS1dfP0MknX6stW25QJNKopqZ36NChF/Tss/+Q97CP1NfIdejHwMBurVu3SNYOS5KGhnq0bt0iXXD+Y64dDNLXt1NdXQ+ovf1yNTTMdOU9Kmn+3fPVO9w77npTbZPWXrXWgxUB2RGOgRTZRrUxvi2cFs5eKGmk97i7t1ttTW1aftby0evwh+Nn/YPqaieVdKJde/vlo59nnodsVVs7URMnvlWHDr2gWOxImVafWXJzYEvLuVURjjMF41zXAS8RjoEUyVFt6cdDF3PMNKrDwtkLCcMBkD/cFq++foZOf8t3K3qcdKbNgdVWTQb8inAMpMl0WEg5DhBBsBzu2a+1D9yjVzas0xd/fKfXy4HH3Azf6dI3B86d+59qbjql6qrJgF8RjgEgRTIUP//onyQbV2x42OslwcfKXc3NtTkw36g55EbfM5wiHAOACMUoTrmrudk2Bw4NH8xYTYZz9D3DKcIxAEi691+u1YE9uyVrvV4KAsSNam56C8cxx1yg1Wsuzjlqzu+aapuyVm2zodJb5V7fKD10jTTcL9VOkBbfIh071+tVSSIcA4Ak6cpv36Q1D9yjFx79o2ycyjHyy9YbXG75Rs0FQTFhlkpvFXt9o/SzhdLg4aPXfrZQ+vtVvgjInJAHAJKstZK1ijY26W2XLlBtNKpILfUDZJbsDY7Fjqil5TzFYke0fv1iDQzsduX92tsvVzTaKsn9DYGA6x66Zmwwlka+fugab9aThr/5AVSNYiZMZOo1fu/nl+m8y6/Umgfu0SudT7u8agRRNVRzAc/0v5n5+nB/5usVRjgGEHjFbKbL95ymKS167+eX6b2fX+bWshFwlRzvhtLl63vO1uOc6X56nkvw+kbpwI7Mj9VOqOhSsiEcAwisUiZMsAEPCJd8gdZpLzM9zyV66Bop4wbWmpFNeT5AOAYQWKUEXDbgAf7jpHqba8IFAiBb68SUWb7YjCcRjgEEWCkBN9k2kewtJiQD3ssVjJ/77HMVXAlck611YsKUii4jF6ZVAAisZMD9wg9/4mjCxOGe/frjHT/SbUs/m/U1mo+ZWomlA0A4Lb5FijaPvRZt9k1LhUTlGEAVSK8Cp0+YcNKbzAY8wN/m3z2fjXDV4Ni5I/OMfXoAiEQ4BlBF0gOuk1BczPg3ICz8dEpdIRvhill3tmkWme5DiY6dKy153OtVZEU4BlC1cm3Y+78/+je9/PRTisdtwZMugLAI6il1xaybqjSS6DkGULWu/PZNOvN9f5exF/nFx/+soYEBxYYGCcaoSn19O7V160r19e30eimOUZWFHxCOAVStZJvFldffrCltx3q9HKCi+vt36tVtt6i/PzjhmOptlXh9o3Tru6V/P2fk4+sbvV5RQWirAFD1Hvr+d0baK4AQsYmDFmzGAxcqw089y6iA1zdKD3xB2rdFUko7288WjmzC89Gmu1yoHAOoernaK/KNfwOC6HDvy3px07WSpBc3XavDvS87et78u+fr7Xe+ffRPqYrp/c3WWkHLhc+9vnEkBO97WWOCsSQNHh6ZThEQ/IsAoOrlOvDjCz/8Scbxb0BQDQzs1vr1i1VT06CWlvN06NCLWr9+sc4/71HV18/I+VynG+1Sg2qu6nAxylFRzjZ5goBdBq9vzDyG7aFrRkJwNtlOxvMhwjGAQCtkFFumecjMN0a1qa+fobecdr2mTr1I0WirBgf3at++x/MG43yynVBXrokW5WzBoGXDJcnqcGoITrZM5Au/2U7G8yHCMYBAOjrD+I8j0yYyjGvLhkCMatfefvno59Foq9rbL/NwNc4EdWxcqGSqDidbJnKFX1PjqxPw8iEcAwiUUkIxAKAE2arDw/3SZbdLt1+scf3GkjR5VmA240mEYwABc8+3vqaDe/d4vQwACJ9s1eHaCSPhd+qcxIa8NBOmuLmqsmNaBQBfO9yzX3+840e6belndbhnv/oOHfJ6SUDVYlIEclp8ixRtHnst2ny0ZeLyO3I/HpD5x1SOAfjS0faJP40e73zvv1yrocEBr5cGVK1iNrJlmwyBKnTs3JHNd5mmVeR7PNdmPp+1XBCOAfhKplCcdOW3bxodxRaPxRWPcewz4LX0QF3sfGTGrwXEsXOlJY8X/niuzXy5Xs8DhGMArtuwvUdrt+7T/NlTNe+Elpz33vsv146cZpdho12mecXJkNx8zFS3lg+gAMVWkhm/VuVybebzGXqOAbhqw/YeXXXHWt38+8266o612rC9J+f9uU6zS0qG5C/88Cd6+3sWqPmYqXlnHAOojLVXrdVzn32Oii/GyrWZz2cIxwBctXbrPg0OxxW30tBwXGu37st5f2rwfdulCxyFZIIx4D9rr1rLBr9qVczGunyb+XyEtgoArpo/e6qitTUaGo6rrrZG82c7a3/IdJodgGChVaIKFbuxLt9mPh8x1kcD9Ds6OmxnZ6fXywBQZoX0HAMAPPDYTdIj39HoIR4NU6XJM8cH2FvfLXU/O/75bWf6bmNdPsaYDdbajvTrVI4BuG7eCS2EYgDwq8dukh65fuy1vn0jf9KrwgHaWFcseo4BAADC7JHvZH8sOW4tKUAb64pFOAYAAAi1PC22qVXhAG2sKxbhGIDnUo+IBgBUmsn9cGpVOLmxru1MqfW0kY8+POWuFK71HBtjVkj6R0l7Epe+aa192K33AxA8uU7DAwBUyCX/PL7nOClTVTjfKXkB5/aGvB9Ya7/v8nsACBhCMQD4yEVfH/noZFpFCDCtAkDF5ToiGgDggYu+fjQkh5zbPcdfNsb81RjzU2MMc5wASHJ2RDQAAF4oKRwbY/5ojHk+w58PSfqxpJMlzZXUJenmLK9xtTGm0xjTuWfPnky3AKgyhRwRDQBAJVXkhDxjzImS/tta+7Zc93FCHhBOvW/2jB4R/cUf3+n1cgAAIZDthDzXwrExpt1a25X4/CuSzrXWfiLXcwjHAAAAqAQvjo++0RgzVyPbHrdJ+qKL7wUAAACUzLVwbK39tFuvDQAAALiBHTAA4FB8MKaDf35NvWu71HxeuyZecrxqohGvlwUAKCPCMQA4MLD1gPb9fy/KDsVlh+I6/OTr6n26W1M/dYbqZ08evW/48KD2/fwFDb12WHXHT9TUz5yh2uaohysHABTC7TnHAFAVetd1K35kWHYoLkmyQ3HFjwyrd1336D2Hntql7uuf1tBrhyVJQ68dUvf1T+vQU7s8WTMAoHCEYwAok8NPZg7B2a4DAPyHtgoA8DH6nAGgsgjHAOBA0zlt6n9p/2jPsamrkamrUdM5ba69p9M+ZwBA+dBWAQAO1M+erLbrzlHzhcfKTIio+cLj1HbdOWNCavOFx2V8brbr+TjpcwYAlBeVYwBwqCYa0eT3n6TJ7z8p4+MTLzhODWdO076fv6ih1w4pesJEHfNpplUAQJAQjgGgjGqbo5qxbK7XywAAFIm2CgDwqaZz2lTTWCtTN/JXtamrUU1jrat9zgAQdlSOAcAnMk2maLvuHB165DUdXtOl5vOO1cRLZjGtAgBcRDgGAB/INZkiV5+zxLg3ACgnwjEA+EByMkVSMiT3ruvOObaNcW8AUF6EYwCoEDcqvMWGagBAZoRjAKiAIFd4adsAECaEYwChsGF7j9Zu3af5s6dq3gktFX//fBXecp/AZ+NWUunBNsihHgCKQTgGUPU2bO/RVXes1eBwXNHaGt39hfmeBORckifwFTqZoumcNvVt2ic7EBtzvX/zfh16apcO/em1koItbRsAwoZwDKDqrd26T4PDccWtNDQc19qt+3wXjqX8J/BlUj97siac1qK+v+4dc90OxHT4yV0EWwAoEOEYQNWbP3uqorU1GhqOq662RvNnT63I+6a2NEx4yzEyDRFp2JalbSKVqeE8JwAoF8IxgKo374QW3f2F+RXtOU7v1e1/YZ9UW6MJbztG/Zv2B+ZAj3L3QgOA3xGOAYTCvBNaKtpKkalXV0NxGWt03Irzy/pe2QJs84XHjek5LibYFtsLDQBBRTgGAJ/LN3EiV4BtOrut5GBbTC80AAQV4RgAfMzpKLVsAZZgCwCFYRcHALig6Zw21TTWytSN/DVr6mpU01hbcK9usj3DDsUljbRnxI8Mq3ddd9nXDACgcgwArqBXFwCCiXAMAC6hpQEAgoe2CgDwsXK1ZwAAnKFyDAA+RnsGAFQW4RgAfI72DACoHNoqAAAAgATCMQAAAJBAOAYAAAASCMcAAABAAuEYAAAASCAcAwAAAAmEYwAAACCBcAwAAAAkEI4BAACABMIxAAAAkEA4BgAAABIIxwAAAEAC4RgAAABIIBwDAAAACYRjAAAAIIFwDAAAACQQjgEAAIAEwjEAAACQQDgGAAAAEgjHAAAAQALhGAAAAEggHAMAAAAJhGMAAAAggXAMAAAAJBCOAQAAgATCMQAAAJBAOAYAAAASCMcAAABAAuEYAAAASCAcAwAAAAmEYwAAACCBcAwAAAAkEI4BAACABMIxAAAAkFBSODbGfMwY84IxJm6M6Uh77BvGmC3GmM3GmPeXtkwAAADAfbUlPv95SZdJui31ojHmDEmfkPRWScdK+qMx5lRrbazE9wMAAABcU1Ll2Fq7yVq7OcNDH5J0r7V2wFr7qqQtks4p5b0AAAAAt7nVc3ycpB0pX+9MXBvHGHO1MabTGNO5Z88el5YDAAAA5Je3rcIY80dJbRke+mdr7a9LXYC19nZJt0tSR0eHLfX1AAAAgGLlDcfW2vcW8bq7JM1K+Xpm4hoAAADgW261VTwk6RPGmHpjzEmSTpG0zqX3AgAAAMqipGkVxpiPSPqhpGmSVhljNlpr32+tfcEY80tJL0oalvQlJlUAABBcq7au0sq/rFR3b7famtq0/KzlWjh7odfLKs53j5MGD4+/Hm2Wvsl/6A47Y61/2nw7OjpsZ2en18sAAAApVm1dpRWrV6g/1j96bUJkglacv8JfAdlp6F0xubjXJzxXFWPMBmttR/p1TsgDAAA5rfzLyjHBWJL6Y/1a+ZeVHq0oi0zBONf1cr0+qgrhGAAA5NTd213QdV/6bsaJssA4hGMAAJBTW1Omia7Zr/sSVV84VOrx0QAA+N7ug/2aMWmC18sIrOVnLc/Yc7z8rOUersojbOareoRjAEBVu3/DTn3zwef08DUXas70iV4vJ5CSm+5cmVbhRdiMNhdfSXa7rxmeIxwDAKraxadNU0NdRJ/5yTp9/2NnyhijmS0NmnVMo9dLC5SFsxe6M5minGHTaejNFLqzhfRiXh+BRjgGAFS11uZ6ndE+SWu27tMn73hakrT8PafoK+871eOVhZyTMFqob+7KPaatHFXqYsfAITAIxwCAqrb38IBe7Dqo46Y06KaPvUNGI5VjeMytCmy26m6uqi/VYKQgHAMAqtqjm/eobyimB5aeR8+xX7g5Vi1XBZiqLxwgHAMAqtpH583Uu05pZVqFn/i9Upur/QJVj3AMAKh6BOMAqnQQdVJV9nuoR1kQjgEAgDuK3XS34kD51wI4RDgGAADuKHYcW7m4MREDVY9wDAAA/KOcB3/kCsap1Wk26iFFjdcLAAAAkMSGN/gClWMAAFB5QewrToZ3JllUNcIxAADwt3KcbJdLroNDytnmgUAgHAMAAHfkCp2FcPtkOwIwUhCOAQChtPtgv6P5x07vQwZeh85yhXOEChvyAAChc/+GnXrXjY9oyxuHynIffOqbu0Z6m1ccGBuIBw+PTKhYMdndo6wRSIRjAEDoXHzaNDXURfSZn6zT6i17teaVfdqx/0jR96XafbDfrWWjFG63ZqBqEI4BAKHT2lyvM9on6fUD/frkHU/ryv9Yq/s37Cz6viQqzUDw0XMMAAidvYcH9GLXQR03pUE3fewdMjKa2dJQ9H1JqZXm73/sTBkzcv+sYxrd/HaqH73DqCDCMQAgdB7dvEd9QzE9sPQ8zZk+0dF9EyfU5d2Yl6w0r9m6T5+842lJ0vL3nKKvvO/Usq4/dLze2IdQIRwDAELno/Nm6l2ntOYNu8n7nnh5r7754HN6+JoLc4bpTJXmCbV0MAJBwm8sACCUnI5nmzFpwmi7xFV3PJ1zY16y0nzn587W+Se3atebfbriP9bSg+wH2VowaM1AGmOt9XoNozo6OmxnZ6fXywAAYJz33PyYXtlztO81W7tE6lzkvYcH9J6bH1NTNEIPMuAzxpgN1tqO9OtUjgEAoZM+bi3f+LW9hwe051C/jJGmNtXpWx88XSdPa8pYPU6tSBc67QKA9wjHAADfcmNmcPq4NSfj1x7dvEf9w3G947jJ2tc7pOsf3qRr7t2YN+im9iD/4h/P1T3/OF8fnTezrN8PgPJiQx4AwJfu37DT0Sa4QqWPW2uur1V9bU3O8WsfnTdTbz12kj5x+1rHY90k51MxAPgH4RgA4EtuzQzONG5tZkuDdvb05Ry/9sLrBwsOuslpFwCCg3AMAPAlt2YGp49bO9g3rK/f/2zeirDT8W/pnI6BA+APhGMAgC8VejqdU+mtDvdv2KmB4bju/NzZecNrvmCcOqkiiVPzgGAhHAMAfMmtft30CnD618lNgIVWiLP1SHNqHhAshGMAgC8V28bgRPprJr++f8NOXffAX2WM9H+Xv6ugUJ6tQtwQjbhSAQfgDsIxAMC33AjGuVx82jQ1RiM6NDCsK25bo6UXzdH0SfV65/EtedsgslWIZx3TyMQKIEAIxwAAJLQ21+utx07Wmq37RucZS0fbIDL1FCdl6pGeUFejdx7f4loFHED5cQgIAAAJew8P6IXXD8gYqbUpqm8tPF23fGKuPjpvZt7DQpI90nd+7mydf3Krdr3ZpytuX6stbxwiGAMBQuUYAICERzfv0ZHBmGprjO794vwxbRAN0UjOqRPpPdJMqQCCyVhrvV7DqI6ODtvZ2en1MgAAIZZrWsWVt6/Vmq37Rr/ON3Wi0PsBVI4xZoO1tiP9OpVjAABSFNJTnGvqhFtzmgG4i3AMAIADhc5ddmtOMwB30VYBAIBDuaZVlON+AJWTra2CaRUAADhUaNAlGAPBQzgGAAAAEgjHAAAAQALhGAAAAEggHAMAAAAJhGMAAAAggXAMAAAAJBCOAQAAgATCMQAAAJBAOAYAAAASCMcAAABAAuEYAAAASCAcAwAAAAmEYwAAACCBcAwAAAAkEI4BAACABGOt9XoNo4wxeyRt9+jtWyXt9ei9UTp+fsHHzzD4+BkGHz/D4ONn6NwJ1tpp6Rd9FY69ZIzptNZ2eL0OFIefX/DxMww+fobBx88w+PgZlo62CgAAACCBcAwAAAAkEI6Put3rBaAk/PyCj59h8PEzDD5+hsHHz7BE9BwDAAAACVSOAQAAgIRQh2NjzMeMMS8YY+LGmI6U6ycaY/qMMRsTf271cp3ILtvPMPHYN4wxW4wxm40x7/dqjXDOGLPCGLMr5Xfvg16vCc4YYz6Q+F3bYoy5zuv1oDDGmG3GmOcSv3edXq8H+RljfmqMecMY83zKtWOMMX8wxryc+Nji5RqDKtThWNLzki6T9HiGx16x1s5N/FlS4XXBuYw/Q2PMGZI+Iemtkj4g6f8YYyKVXx6K8IOU372HvV4M8kv8bv1I0t9JOkPSlYnfQQTLJYnfO8aABcPPNPLvW6rrJP3JWnuKpD8lvkaBQh2OrbWbrLWbvV4HipfjZ/ghSfdaawesta9K2iLpnMquDgiNcyRtsdZutdYOSrpXI7+DAFxirX1c0v60yx+SdGfi8zslfbiSa6oWoQ7HeZxkjHnGGPOYMeZdXi8GBTtO0o6Ur3cmrsH/vmyM+WviPxnynwSDgd+34LOSfm+M2WCMudrrxaBoM6y1XYnPuyXN8HIxQVXr9QLcZoz5o6S2DA/9s7X211me1iXpeGvtPmPMPEn/ZYx5q7X2oGsLRVZF/gzhU7l+npJ+LOnbGvmH+tuSbpb0ucqtDgitC621u4wx0yX9wRjzt0RlEgFlrbXGGEaSFaHqw7G19r1FPGdA0kDi8w3GmFcknSqJTQoeKOZnKGmXpFkpX89MXIPHnP48jTH/Iem/XV4OyoPft4Cz1u5KfHzDGPOgRlplCMfBs9sY026t7TLGtEt6w+sFBRFtFRkYY6YlN28ZY2ZLOkXSVm9XhQI9JOkTxph6Y8xJGvkZrvN4Tcgj8Zd50kc0suES/rde0inGmJOMMVGNbIZ9yOM1wSFjTJMxZmLyc0kLxO9eUD0k6bOJzz8rif+6WoSqrxznYoz5iKQfSpomaZUxZqO19v2S3i3pfxljhiTFJS2x1qY3vcMHsv0MrbUvGGN+KelFScOSvmStjXm5VjhyozFmrkbaKrZJ+qKnq4Ej1tphY8yXJf1OUkTST621L3i8LDg3Q9KDxhhpJBf8wlr7W2+XhHyMMfdIulhSqzFmp6R/lfQ9Sb80xnxe0nZJH/duhcHFCXkAAABAAm0VAAAAQALhGAAAAEggHAMAAAAJhGMAAAAggXAMAAAAJBCOAQAAgATCMQAAAJBAOAYAAAAS/n+FlJND1MYtNAAAAABJRU5ErkJggg==\n"},"metadata":{"needs_background":"light"}}]},{"cell_type":"markdown","source":"# Error Analysis","metadata":{}},{"cell_type":"markdown","source":"## Plotting Outliers","metadata":{}},{"cell_type":"code","source":"###### Function to Retrieve an Example\n\ndef retrieve_example(cl_ind,cl_item_ind,cl_type):\n\n    \"\"\" \n    Function to retrieve a particular example indexed according to the STS Matching File\n\n    INPUTS:-\n    1)cl_ind : Class Index of the example\n    2)cl_item_ind : Index of the example amongst the class vector\n    3)cl_type : String flag between gal and probe\n\n    OUTPUTS:-\n    1)ret_example : The item shaped [sequence_len,1], that is to be retrieved\n\n    \"\"\"\n\n    ##### For Gallery's Example\n    \n    if(cl_type=='gal'):\n\n        #### Setting the Correct Gallery Class and ID\n        gal_class = cl_ind-1 # gal_class\n        gal_id = cl_item_ind - 1 # gal_id\n \n        #### Searching the required Example - Curating Examples from gal_class\n        item_index = []\n        for j in range(X_train.shape[0]):\n            if(y_train[j] == gal_class):\n                item_index.append(j)\n\n        ret_example = X_train[item_index[int(gal_id)]] # Retrieving the required\n\n    ##### For Probe Class\n\n    if(cl_type=='probe'):\n        \n        #### Setting the Correct Probe Class and ID\n        probe_class = cl_ind - 1 # probe_class\n        \n        gallery_index = []\n        for j in range(X_train.shape[0]):\n            if(y_train[j] == probe_class):\n                gallery_index.append(j)  \n\n        gal_len = len(gallery_index) \n\n        probe_id = cl_item_ind - gal_len - 1 # probe_id\n\n    #### Searching the required Example - Curating Examples from gal_class\n        item_index = []\n        for j in range(X_dev.shape[0]):\n            if(y_dev[j] == probe_class):\n                item_index.append(j) \n\n        ret_example = X_dev[item_index[int(probe_id)]] # Retrieving the required\n\n    ##### Returning the Example\n\n    return ret_example","metadata":{"execution":{"iopub.status.busy":"2021-07-23T14:03:25.329212Z","iopub.execute_input":"2021-07-23T14:03:25.329547Z","iopub.status.idle":"2021-07-23T14:03:25.33928Z","shell.execute_reply.started":"2021-07-23T14:03:25.329518Z","shell.execute_reply":"2021-07-23T14:03:25.338271Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"###### Making the Plot Storing Directory\n! mkdir './Outlier Plots'","metadata":{"execution":{"iopub.status.busy":"2021-07-20T08:09:18.798264Z","iopub.execute_input":"2021-07-20T08:09:18.798776Z","iopub.status.idle":"2021-07-20T08:09:19.550609Z","shell.execute_reply.started":"2021-07-20T08:09:18.798743Z","shell.execute_reply":"2021-07-20T08:09:19.548441Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"###### Plotting the Outliers\n\n##### Fetching the Examples\n\nitem_1 = retrieve_example(34,20,'probe') \nitem_2 = retrieve_example(60,6,'gal')\nitem_3 = retrieve_example(85,8,'gal')\n\n##### Plotting the Requires \n\nplt.rcParams[\"figure.figsize\"] = [16,8]\nplt.plot(np.arange(512),item_1)\nplt.plot(np.arange(512),item_2)\nplt.plot(np.arange(512),item_3)\nplt.xlabel('T')\nplt.ylabel('V')\nplt.title('Ratio : '+str(6))\nplt.legend([str(j) for j in range(3)])\n#plt.savefig('./Outlier Plots/OP_5.png')\nplt.show()","metadata":{"execution":{"iopub.status.busy":"2021-07-23T14:03:33.6482Z","iopub.execute_input":"2021-07-23T14:03:33.648692Z","iopub.status.idle":"2021-07-23T14:03:34.02824Z","shell.execute_reply.started":"2021-07-23T14:03:33.648662Z","shell.execute_reply":"2021-07-23T14:03:34.027106Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"code","source":"###### Zipping the Outlier Plots Folder\nshutil.make_archive(\"Outlier_Plots_ECG1D_Incremental\", \"zip\", \"./Outlier Plots\")","metadata":{"execution":{"iopub.status.busy":"2021-07-20T08:15:40.362929Z","iopub.execute_input":"2021-07-20T08:15:40.363325Z","iopub.status.idle":"2021-07-20T08:15:40.393395Z","shell.execute_reply.started":"2021-07-20T08:15:40.363285Z","shell.execute_reply":"2021-07-20T08:15:40.392449Z"},"trusted":true},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":"## Bandpass Matching","metadata":{}},{"cell_type":"code","source":"####### Filtering a specific Signal\n\n##### Plotting the Filtered Ouptut\nbp_filter = scipy.signal.butter(4,(1/180,110/180),'bp',output='sos')\nfiltered_op = scipy.signal.sosfilt(bp_filter,item_1)\nplt.plot(np.arange(512),item_1)\nplt.plot(np.arange(512),filtered_op)\nplt.show()","metadata":{"execution":{"iopub.status.busy":"2021-07-23T14:10:47.352398Z","iopub.execute_input":"2021-07-23T14:10:47.352713Z","iopub.status.idle":"2021-07-23T14:10:47.509937Z","shell.execute_reply.started":"2021-07-23T14:10:47.352685Z","shell.execute_reply":"2021-07-23T14:10:47.508935Z"},"trusted":true},"execution_count":null,"outputs":[]}]}