{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 125,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Importing data used in Ch2 to be revisted in systems of linear equations:\n",
    "D = dict(\n",
    "{'baths': np.array([2, 2, 2, 2, 1, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2,\n",
    "       2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2,\n",
    "       2, 2, 2, 2, 1, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2,\n",
    "       3, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 3, 2, 1, 2,\n",
    "       1, 3, 2, 3, 2, 2, 2, 1, 3, 2, 2, 3, 2, 3, 4, 3, 1, 3, 3, 2, 2, 3,\n",
    "       2, 3, 3, 2, 3, 3, 2, 2, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 4, 2, 3, 1,\n",
    "       2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 2, 1,\n",
    "       2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2,\n",
    "       1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 3, 1,\n",
    "       2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 2, 2,\n",
    "       2, 2, 2, 3, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2,\n",
    "       2, 1, 3, 1, 2, 2, 2, 2, 2, 3, 4, 2, 2, 3, 4, 2, 1, 2, 1, 3, 4, 3,\n",
    "       2, 3, 3, 2, 3, 5, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1,\n",
    "       1, 1, 1, 2, 1, 1, 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 2, 1, 2, 2, 1, 2,\n",
    "       2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1,\n",
    "       2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2,\n",
    "       1, 2, 1, 2, 3, 2, 1, 2, 2, 2, 3, 1, 2, 2, 1, 3, 2, 1, 2, 2, 2, 2,\n",
    "       1, 3, 2, 2, 1, 2, 2, 3, 2, 2, 2, 3, 2, 3, 2, 2, 3, 2, 2, 1, 2, 2,\n",
    "       2, 2, 2, 3, 2, 2, 2, 1, 2, 3, 2, 3, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2,\n",
    "       3, 2, 2, 2, 3, 3, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 3, 2,\n",
    "       3, 3, 3, 2, 2, 4, 3, 2, 4, 3, 2, 2, 2, 2, 3, 3, 3, 3, 1, 1, 1, 1,\n",
    "       1, 2, 1, 1, 1, 1, 2, 1, 1, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2,\n",
    "       2, 2, 2, 2, 3, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n",
    "       2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n",
    "       2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 3, 1, 1, 2, 1, 1, 2, 2, 2, 2, 2, 2,\n",
    "       2, 1, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2,\n",
    "       3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2,\n",
    "       2, 3, 2, 3, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 3, 2, 3,\n",
    "       2, 2, 2, 3, 2, 3, 2, 3, 3, 2, 2, 3, 2, 2, 3, 2, 3, 3, 2, 2, 3, 3,\n",
    "       2, 2, 2, 2, 2, 3, 1, 2, 2, 2, 3, 2, 1, 2, 3, 1, 3, 3, 3, 2, 3, 2,\n",
    "       1, 1, 3, 4, 3, 3, 2, 1, 1, 1, 1, 1, 2, 2, 1, 2, 1, 2, 2, 1, 1, 1,\n",
    "       1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1, 2, 1, 2, 1, 1,\n",
    "       2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2,\n",
    "       2, 1, 2, 1, 1, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 2, 2, 1, 2,\n",
    "       1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 3,\n",
    "       2, 2, 2, 2]), 'location': np.array([2, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 3, 3, 2, 3, 3,\n",
    "       2, 2, 2, 3, 3, 2, 3, 3, 2, 2, 3, 3, 2, 2, 2, 3, 3, 2, 3, 3, 3, 3,\n",
    "       3, 3, 3, 3, 2, 3, 2, 3, 2, 3, 3, 3, 3, 2, 3, 3, 4, 2, 2, 3, 3, 2,\n",
    "       3, 3, 3, 2, 2, 2, 3, 2, 2, 3, 3, 3, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2,\n",
    "       3, 3, 3, 3, 3, 3, 3, 1, 3, 3, 2, 2, 3, 2, 2, 3, 2, 3, 3, 4, 3, 3,\n",
    "       3, 3, 2, 3, 3, 2, 3, 2, 1, 3, 4, 3, 3, 3, 2, 4, 3, 3, 3, 4, 2, 2,\n",
    "       1, 3, 2, 3, 3, 2, 2, 3, 3, 3, 2, 3, 2, 3, 3, 3, 2, 2, 2, 3, 2, 2,\n",
    "       3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 3, 3, 2, 3, 3, 3, 3, 3, 2, 2, 3,\n",
    "       2, 2, 3, 2, 3, 2, 2, 3, 3, 2, 3, 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 3,\n",
    "       2, 2, 3, 2, 2, 3, 3, 2, 4, 2, 3, 2, 2, 3, 2, 3, 1, 2, 2, 4, 2, 3,\n",
    "       3, 2, 3, 2, 2, 2, 3, 3, 3, 2, 2, 4, 2, 4, 2, 2, 3, 2, 2, 3, 4, 4,\n",
    "       3, 3, 2, 1, 4, 3, 4, 3, 1, 3, 3, 4, 4, 3, 3, 2, 1, 4, 2, 3, 2, 2,\n",
    "       2, 3, 3, 2, 4, 2, 1, 2, 1, 3, 2, 3, 3, 2, 2, 2, 3, 3, 3, 3, 2, 2,\n",
    "       2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 4, 3, 2, 3, 2, 3, 3, 2, 3, 3, 2, 2,\n",
    "       2, 3, 4, 3, 2, 3, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 3, 4,\n",
    "       2, 2, 3, 3, 2, 2, 3, 2, 2, 2, 3, 2, 3, 3, 3, 2, 4, 4, 2, 2, 3, 2,\n",
    "       2, 3, 2, 2, 3, 3, 2, 3, 3, 2, 1, 4, 4, 2, 3, 3, 3, 2, 3, 3, 2, 3,\n",
    "       2, 2, 3, 2, 3, 2, 3, 3, 4, 2, 3, 2, 3, 2, 3, 3, 2, 3, 2, 2, 2, 2,\n",
    "       2, 3, 4, 2, 2, 4, 2, 2, 3, 3, 3, 2, 3, 1, 2, 4, 2, 3, 3, 2, 3, 3,\n",
    "       2, 2, 3, 3, 3, 2, 2, 2, 3, 2, 3, 3, 3, 2, 2, 2, 1, 4, 3, 1, 2, 1,\n",
    "       2, 4, 1, 4, 3, 3, 2, 4, 2, 3, 4, 4, 2, 3, 4, 2, 4, 3, 3, 2, 2, 2,\n",
    "       1, 3, 2, 3, 3, 2, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2,\n",
    "       2, 3, 3, 2, 2, 2, 3, 2, 3, 3, 3, 2, 3, 2, 2, 3, 3, 2, 2, 3, 3, 3,\n",
    "       3, 3, 2, 3, 2, 3, 2, 3, 3, 3, 2, 2, 3, 2, 2, 4, 3, 2, 4, 3, 3, 3,\n",
    "       3, 2, 3, 2, 2, 4, 3, 2, 3, 2, 2, 2, 3, 2, 2, 2, 3, 2, 2, 3, 2, 2,\n",
    "       2, 2, 3, 3, 4, 2, 1, 1, 2, 3, 2, 3, 3, 2, 2, 3, 3, 4, 2, 2, 3, 2,\n",
    "       2, 3, 3, 2, 3, 2, 2, 2, 2, 2, 3, 3, 2, 3, 3, 2, 4, 2, 2, 4, 2, 3,\n",
    "       3, 2, 4, 3, 2, 2, 2, 3, 3, 3, 2, 2, 2, 3, 4, 3, 1, 2, 4, 3, 4, 3,\n",
    "       2, 2, 4, 4, 3, 4, 2, 4, 2, 2, 3, 2, 4, 3, 2, 2, 2, 2, 2, 2, 4, 3,\n",
    "       2, 4, 4, 3, 4, 4, 3, 3, 2, 4, 3, 4, 1, 2, 4, 1, 4, 3, 4, 2, 4, 2,\n",
    "       2, 1, 4, 3, 4, 4, 3, 3, 3, 2, 3, 2, 2, 2, 2, 3, 3, 2, 3, 2, 2, 3,\n",
    "       2, 2, 3, 3, 3, 3, 3, 3, 2, 1, 3, 3, 3, 3, 2, 3, 2, 2, 2, 3, 2, 1,\n",
    "       3, 2, 3, 2, 3, 3, 2, 2, 3, 1, 3, 2, 2, 4, 3, 3, 3, 2, 3, 3, 2, 3,\n",
    "       3, 2, 2, 3, 2, 2, 2, 2, 3, 3, 2, 2, 3, 3, 2, 3, 2, 1, 2, 3, 2, 4,\n",
    "       2, 3, 3, 2, 3, 3, 3, 2, 3, 3, 2, 3, 2, 2, 2, 2, 3, 4, 3, 3, 3, 3,\n",
    "       3, 2, 3, 4]), 'price': np.array([ 94.905,  98.937, 100.309, 106.25 , 107.502, 108.75 , 110.7  ,\n",
    "       113.263, 116.25 , 120.   , 121.63 , 122.   , 122.682, 123.   ,\n",
    "       124.1  , 125.   , 126.64 , 127.281, 129.   , 131.2  , 132.   ,\n",
    "       133.   , 134.555, 136.5  , 138.75 , 141.   , 146.25 , 147.308,\n",
    "       148.75 , 149.593, 150.   , 152.   , 154.   , 156.896, 161.25 ,\n",
    "       161.5  , 164.   , 165.   , 166.357, 166.357, 168.   , 170.   ,\n",
    "       173.   , 174.25 , 174.313, 178.48 , 178.76 , 181.   , 181.872,\n",
    "       182.587, 182.716, 182.75 , 183.2  , 188.741, 189.   , 192.067,\n",
    "       194.   , 194.818, 198.   , 199.5  , 200.   , 200.   , 208.   ,\n",
    "       212.864, 221.   , 221.   , 223.058, 227.887, 231.477, 234.697,\n",
    "       235.   , 236.   , 236.685, 237.8  , 240.122, 242.638, 244.   ,\n",
    "       244.96 , 245.918, 250.   , 250.   , 250.134, 254.2  , 254.2  ,\n",
    "       258.   , 260.   , 260.014, 265.   , 271.742, 273.75 , 275.086,\n",
    "       280.987, 285.   , 287.417, 291.   , 292.024, 297.   , 298.   ,\n",
    "       299.   , 304.037, 311.   , 315.537, 320.   , 328.36 , 334.15 ,\n",
    "       335.75 , 335.75 , 344.25 , 346.21 , 347.029, 347.65 , 351.3  ,\n",
    "       370.5  , 372.   , 375.   , 381.3  , 381.942, 387.731, 391.   ,\n",
    "       394.47 , 395.   , 400.186, 415.   , 425.   , 430.   , 460.   ,\n",
    "       461.   , 489.332, 510.   , 539.   , 660.   ,  69.   ,  70.   ,\n",
    "        71.   ,  78.   ,  78.4  ,  80.   ,  89.   ,  90.   ,  90.   ,\n",
    "        92.   ,  93.675,  98.   ,  98.   ,  99.   , 100.   , 106.716,\n",
    "       111.   , 111.   , 114.8  , 120.108, 123.225, 123.75 , 125.   ,\n",
    "       125.   , 126.   , 129.   , 134.   , 135.   , 135.5  , 140.   ,\n",
    "       140.   , 142.5  , 143.5  , 145.   , 145.   , 145.   , 146.   ,\n",
    "       148.5  , 149.   , 150.   , 150.   , 152.   , 156.   , 156.   ,\n",
    "       156.   , 157.788, 161.653, 161.829, 165.   , 168.   , 169.   ,\n",
    "       175.   , 176.25 , 179.   , 180.   , 180.4  , 182.   , 184.5  ,\n",
    "       185.   , 189.   , 194.   , 195.   , 200.   , 205.   , 205.   ,\n",
    "       205.   , 207.   , 215.   , 215.   , 222.381, 225.   , 225.   ,\n",
    "       225.   , 228.   , 229.665, 230.   , 230.   , 230.   , 234.   ,\n",
    "       235.   , 236.25 , 245.   , 245.   , 245.   , 250.   , 250.   ,\n",
    "       250.   , 255.   , 257.729, 260.   , 261.   , 264.469, 265.   ,\n",
    "       270.   , 270.   , 275.   , 275.   , 280.   , 286.013, 292.   ,\n",
    "       292.   , 293.993, 294.   , 296.769, 300.   , 300.   , 300.5  ,\n",
    "       305.   , 319.789, 330.   , 330.   , 331.   , 334.   , 336.   ,\n",
    "       339.   , 339.   , 345.   , 356.   , 361.745, 361.948, 370.   ,\n",
    "       385.   , 399.   , 402.   , 406.026, 420.   , 425.   , 445.   ,\n",
    "       450.   , 460.   , 460.   , 465.   , 471.75 , 484.   , 495.   ,\n",
    "       572.5  , 582.   , 613.401, 680.   , 699.   ,  61.5  ,  62.05 ,\n",
    "        65.   ,  65.   ,  68.   ,  68.   ,  77.   ,  82.732,  84.   ,\n",
    "        84.675,  85.   ,  90.   ,  90.   ,  91.   ,  95.   ,  97.5  ,\n",
    "       100.   , 101.   , 102.75 , 112.5  , 113.   , 114.   , 114.   ,\n",
    "       114.75 , 115.   , 115.   , 116.1  , 119.25 , 120.   , 120.   ,\n",
    "       120.108, 121.5  , 121.725, 122.   , 123.   , 125.   , 125.573,\n",
    "       126.714, 126.96 , 127.   , 127.5  , 130.   , 133.105, 136.5  ,\n",
    "       139.5  , 140.   , 140.8  , 145.   , 147.   , 149.6  , 150.   ,\n",
    "       150.   , 155.   , 155.435, 155.5  , 158.   , 158.   , 160.   ,\n",
    "       160.   , 164.   , 164.   , 165.   , 167.   , 167.293, 167.293,\n",
    "       168.   , 170.   , 170.   , 170.   , 174.   , 178.   , 180.   ,\n",
    "       180.   , 180.   , 182.   , 188.325, 191.5  , 192.   , 192.7  ,\n",
    "       195.   , 197.654, 198.   , 200.345, 203.   , 207.   , 208.   ,\n",
    "       210.   , 212.   , 213.675, 213.697, 215.   , 215.   , 215.1  ,\n",
    "       217.5  , 218.   , 220.   , 221.   , 222.9  , 223.139, 225.5  ,\n",
    "       228.327, 230.   , 230.   , 230.522, 231.2  , 232.   , 232.5  ,\n",
    "       233.641, 234.   , 234.5  , 235.   , 236.073, 238.   , 238.861,\n",
    "       239.7  , 240.   , 240.   , 241.   , 245.   , 246.   , 247.234,\n",
    "       247.48 , 249.862, 251.   , 252.155, 254.172, 258.   , 260.   ,\n",
    "       261.   , 261.   , 262.5  , 266.   , 266.   , 270.   , 274.425,\n",
    "       275.336, 277.98 , 280.   , 284.686, 284.893, 285.   , 285.   ,\n",
    "       295.   , 296.   , 296.056, 297.359, 299.94 , 305.   , 311.328,\n",
    "       313.138, 316.63 , 320.   , 320.   , 325.   , 328.578, 331.   ,\n",
    "       340.   , 345.746, 351.   , 353.767, 356.035, 360.552, 362.305,\n",
    "       365.   , 370.   , 378.   , 388.   , 395.1  , 400.   , 408.431,\n",
    "       423.   , 427.5  , 430.922, 445.   , 450.   , 452.   , 470.   ,\n",
    "       475.   , 484.5  , 500.   , 506.688, 528.   , 579.093, 636.   ,\n",
    "       668.365, 676.2  , 691.659,  55.422,  63.   ,  65.   ,  65.   ,\n",
    "        65.   ,  66.5  ,  71.   ,  75.   ,  77.   ,  85.   ,  95.625,\n",
    "        96.14 , 104.25 , 105.   , 108.   , 109.   , 115.   , 115.   ,\n",
    "       115.5  , 115.62 , 116.   , 122.   , 122.5  , 123.   , 124.   ,\n",
    "       124.   , 124.413, 125.   , 130.   , 131.75 , 137.721, 137.76 ,\n",
    "       138.   , 140.   , 145.   , 145.   , 150.   , 150.   , 151.   ,\n",
    "       155.   , 155.8  , 156.142, 158.   , 160.   , 161.5  , 161.6  ,\n",
    "       162.   , 165.   , 165.   , 167.293, 168.   , 168.   , 168.75 ,\n",
    "       168.75 , 170.   , 170.25 , 173.   , 176.095, 176.25 , 178.   ,\n",
    "       179.   , 180.   , 180.   , 180.   , 181.   , 182.   , 182.587,\n",
    "       185.074, 185.833, 186.785, 187.   , 188.335, 190.   , 190.   ,\n",
    "       190.   , 190.   , 191.25 , 193.   , 193.5  , 195.   , 195.   ,\n",
    "       195.   , 198.   , 199.9  , 200.   , 201.   , 204.918, 205.   ,\n",
    "       205.878, 207.   , 207.744, 209.   , 210.   , 210.944, 213.75 ,\n",
    "       215.   , 215.   , 220.   , 220.   , 220.   , 220.   , 220.   ,\n",
    "       220.702, 222.   , 222.75 , 225.   , 225.   , 228.75 , 229.   ,\n",
    "       230.095, 232.5  , 233.   , 233.5  , 239.   , 240.   , 240.   ,\n",
    "       240.971, 242.   , 243.45 , 243.5  , 246.544, 246.75 , 247.   ,\n",
    "       249.   , 249.   , 250.   , 250.   , 252.   , 255.   , 255.   ,\n",
    "       255.   , 257.2  , 260.   , 260.   , 263.5  , 266.51 , 275.   ,\n",
    "       276.   , 276.5  , 278.   , 279.   , 280.   , 280.   , 285.   ,\n",
    "       288.   , 289.   , 290.   , 290.   , 293.996, 294.173, 295.   ,\n",
    "       298.   , 299.   , 300.   , 300.   , 300.   , 300.567, 303.   ,\n",
    "       305.   , 310.   , 310.   , 310.   , 311.518, 312.   , 315.   ,\n",
    "       315.   , 315.   , 315.   , 320.   , 322.   , 325.   , 328.37 ,\n",
    "       330.   , 331.2  , 332.   , 334.   , 335.   , 341.   , 346.375,\n",
    "       349.   , 350.   , 350.   , 350.   , 351.   , 360.   , 367.463,\n",
    "       380.   , 380.578, 386.222, 395.5  , 397.   , 400.   , 413.5  ,\n",
    "       415.   , 420.454, 425.   , 441.   , 445.   , 446.   , 450.   ,\n",
    "       455.   , 525.   , 545.   , 575.   , 575.   , 598.695, 600.   ,\n",
    "       610.   ,  56.95 ,  60.   ,  61.   ,  62.   ,  68.566,  70.   ,\n",
    "        80.   ,  85.5  ,  92.   ,  93.6  ,  95.   ,  97.75 , 104.   ,\n",
    "       105.   , 107.666, 109.   , 110.   , 110.   , 112.5  , 114.8  ,\n",
    "       116.   , 121.5  , 122.   , 123.675, 126.854, 127.059, 128.687,\n",
    "       129.5  , 130.   , 131.75 , 132.   , 134.   , 134.   , 142.   ,\n",
    "       143.012, 145.846, 147.   , 148.75 , 150.   , 150.454, 151.087,\n",
    "       157.296, 157.5  , 160.   , 160.   , 161.25 , 164.   , 165.   ,\n",
    "       165.75 , 166.   , 169.   , 170.   , 170.   , 170.725, 171.75 ,\n",
    "       172.   , 173.056, 174.   , 174.25 , 176.85 , 179.5  , 185.   ,\n",
    "       188.7  , 189.   , 189.   , 189.836, 190.   , 191.25 , 191.675,\n",
    "       195.5  , 198.   , 200.   , 200.   , 200.   , 201.528, 204.75 ,\n",
    "       205.   , 205.   , 205.9  , 207.   , 207.973, 208.25 , 208.318,\n",
    "       209.347, 211.5  , 212.   , 213.   , 216.   , 216.021, 219.   ,\n",
    "       219.794, 220.   , 220.   , 220.   , 223.   , 224.   , 224.252,\n",
    "       225.   , 228.   , 229.027, 229.5  , 230.   , 230.   , 232.425,\n",
    "       234.   , 235.   , 235.301, 235.738]), 'beds': np.array([2, 3, 3, 3, 3, 2, 2, 2, 2, 2, 3, 3, 4, 4, 3, 3, 3, 3, 3, 4, 3, 2,\n",
    "       3, 3, 3, 2, 3, 3, 4, 4, 1, 2, 3, 3, 2, 4, 4, 4, 4, 4, 3, 3, 4, 3,\n",
    "       4, 3, 3, 3, 3, 4, 3, 3, 4, 3, 3, 3, 3, 3, 3, 4, 3, 3, 3, 4, 4, 2,\n",
    "       5, 4, 3, 3, 4, 4, 4, 3, 3, 3, 3, 3, 5, 3, 4, 2, 3, 4, 5, 3, 3, 3,\n",
    "       3, 4, 3, 4, 4, 4, 3, 2, 5, 3, 2, 5, 4, 5, 5, 4, 2, 3, 4, 3, 4, 3,\n",
    "       4, 4, 4, 3, 5, 5, 4, 4, 3, 4, 3, 4, 3, 5, 5, 5, 4, 4, 5, 4, 3, 2,\n",
    "       4, 2, 2, 2, 3, 4, 3, 2, 3, 2, 2, 1, 3, 1, 3, 3, 3, 2, 3, 3, 3, 3,\n",
    "       3, 3, 3, 2, 3, 3, 3, 3, 2, 3, 2, 4, 3, 4, 3, 3, 3, 4, 2, 3, 2, 2,\n",
    "       2, 3, 2, 3, 3, 3, 3, 3, 3, 2, 3, 4, 4, 3, 4, 3, 3, 3, 3, 4, 4, 2,\n",
    "       4, 3, 3, 3, 4, 3, 4, 4, 3, 2, 3, 3, 3, 3, 3, 4, 2, 3, 4, 3, 3, 3,\n",
    "       4, 3, 4, 4, 4, 3, 4, 4, 4, 3, 4, 3, 3, 3, 3, 3, 4, 4, 3, 4, 2, 3,\n",
    "       3, 2, 4, 3, 4, 4, 3, 4, 4, 5, 5, 3, 4, 4, 5, 4, 2, 4, 3, 4, 4, 5,\n",
    "       2, 4, 5, 4, 4, 5, 5, 3, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 3, 2, 2, 3,\n",
    "       2, 1, 2, 4, 3, 3, 4, 3, 2, 2, 2, 3, 3, 2, 2, 3, 2, 3, 3, 3, 2, 4,\n",
    "       4, 2, 2, 4, 3, 2, 2, 3, 3, 3, 3, 4, 3, 5, 3, 3, 3, 3, 4, 3, 3, 2,\n",
    "       2, 3, 2, 3, 4, 4, 3, 3, 4, 3, 3, 3, 3, 3, 4, 2, 3, 3, 3, 3, 3, 3,\n",
    "       3, 3, 2, 4, 4, 4, 3, 4, 2, 4, 4, 3, 3, 3, 2, 4, 3, 3, 4, 3, 4, 3,\n",
    "       2, 4, 3, 2, 3, 3, 2, 4, 3, 3, 4, 4, 4, 5, 4, 3, 4, 4, 4, 3, 3, 4,\n",
    "       3, 4, 2, 4, 4, 4, 4, 3, 3, 4, 3, 4, 4, 3, 3, 3, 4, 5, 4, 4, 3, 4,\n",
    "       4, 4, 3, 3, 5, 5, 5, 4, 3, 4, 3, 4, 4, 4, 4, 3, 3, 2, 3, 3, 5, 3,\n",
    "       4, 4, 2, 3, 4, 4, 5, 3, 4, 3, 3, 3, 4, 5, 4, 4, 5, 5, 2, 2, 2, 2,\n",
    "       2, 2, 2, 2, 2, 2, 3, 2, 2, 4, 2, 4, 2, 1, 3, 3, 3, 3, 3, 4, 3, 4,\n",
    "       3, 4, 3, 2, 4, 3, 3, 3, 3, 2, 3, 3, 4, 3, 3, 3, 4, 3, 3, 4, 3, 4,\n",
    "       3, 3, 5, 4, 3, 3, 3, 3, 3, 3, 3, 3, 4, 3, 3, 3, 3, 3, 3, 4, 3, 4,\n",
    "       3, 3, 3, 3, 4, 2, 3, 3, 3, 4, 4, 3, 3, 3, 3, 3, 4, 3, 3, 4, 4, 3,\n",
    "       3, 1, 4, 3, 4, 4, 2, 2, 4, 3, 2, 3, 5, 3, 4, 4, 3, 3, 5, 3, 4, 3,\n",
    "       4, 4, 3, 2, 3, 4, 3, 4, 3, 3, 4, 3, 3, 4, 3, 3, 3, 4, 3, 3, 3, 3,\n",
    "       3, 5, 3, 4, 5, 3, 4, 3, 3, 4, 3, 4, 4, 4, 4, 4, 2, 2, 3, 6, 3, 5,\n",
    "       3, 3, 3, 4, 4, 5, 4, 4, 5, 3, 3, 4, 4, 3, 4, 4, 4, 3, 3, 4, 5, 5,\n",
    "       3, 3, 3, 3, 4, 4, 3, 3, 3, 3, 5, 4, 2, 3, 4, 3, 5, 5, 4, 4, 4, 3,\n",
    "       3, 2, 4, 5, 4, 5, 4, 1, 2, 2, 2, 2, 4, 3, 2, 2, 3, 3, 3, 3, 1, 3,\n",
    "       3, 3, 3, 3, 3, 4, 3, 2, 4, 3, 3, 3, 3, 4, 2, 3, 3, 3, 3, 3, 2, 3,\n",
    "       4, 2, 3, 3, 3, 3, 3, 4, 3, 2, 4, 3, 3, 3, 3, 3, 2, 3, 2, 3, 3, 4,\n",
    "       4, 3, 3, 3, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 2, 3, 2, 2, 3,\n",
    "       3, 3, 4, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 3, 4,\n",
    "       3, 3, 4, 3]), 'area': np.array([0.941, 1.146, 0.909, 1.289, 1.02 , 1.022, 1.134, 0.844, 0.795,\n",
    "       0.588, 1.356, 1.118, 1.329, 1.24 , 1.601, 0.901, 1.088, 0.963,\n",
    "       1.119, 1.38 , 1.248, 1.039, 1.152, 1.38 , 1.116, 1.039, 1.418,\n",
    "       1.082, 1.472, 1.146, 0.76 , 1.304, 1.207, 1.056, 1.043, 1.587,\n",
    "       1.12 , 1.58 , 1.955, 1.656, 1.477, 1.188, 1.59 , 1.463, 1.714,\n",
    "       1.185, 1.406, 1.172, 1.152, 1.851, 1.215, 1.13 , 1.603, 1.479,\n",
    "       1.42 , 1.28 , 1.586, 1.362, 1.266, 1.715, 1.82 , 0.936, 1.511,\n",
    "       1.59 , 1.596, 1.341, 2.136, 1.616, 1.478, 1.287, 1.277, 1.448,\n",
    "       2.235, 2.093, 1.193, 2.163, 1.269, 0.958, 2.508, 1.305, 1.591,\n",
    "       1.326, 1.843, 1.921, 2.79 , 1.541, 1.018, 1.672, 0.975, 2.372,\n",
    "       1.446, 3.009, 2.056, 1.993, 1.857, 1.126, 2.494, 1.843, 1.52 ,\n",
    "       2.8  , 2.309, 2.367, 3.516, 1.914, 1.69 , 2.725, 2.354, 2.185,\n",
    "       1.801, 1.961, 3.134, 1.915, 2.734, 2.11 , 3.164, 3.599, 2.054,\n",
    "       1.83 , 1.627, 3.44 , 2.846, 2.359, 2.052, 3.433, 3.615, 2.687,\n",
    "       2.724, 3.44 , 3.508, 2.462, 2.325, 0.795, 1.099, 0.84 , 0.8  ,\n",
    "       0.746, 1.067, 1.316, 1.337, 0.868, 0.924, 0.61 , 1.22 , 0.722,\n",
    "       1.643, 0.722, 1.08 , 1.039, 1.051, 0.967, 1.098, 1.05 , 1.11 ,\n",
    "       0.888, 1.12 , 1.08 , 0.957, 0.952, 1.211, 1.264, 1.08 , 1.266,\n",
    "       0.994, 1.202, 0.722, 1.448, 1.188, 1.183, 1.32 , 1.117, 1.364,\n",
    "       1.31 , 1.006, 1.104, 0.81 , 1.123, 0.904, 1.156, 1.321, 1.392,\n",
    "       1.439, 1.159, 1.671, 1.74 , 1.265, 1.007, 1.716, 1.685, 1.829,\n",
    "       1.555, 1.12 , 1.137, 1.174, 1.393, 1.289, 1.799, 1.953, 0.723,\n",
    "       1.578, 1.317, 1.36 , 1.522, 1.751, 1.465, 1.605, 1.475, 1.216,\n",
    "       1.315, 1.567, 1.776, 2.187, 1.291, 1.503, 2.491, 1.269, 1.176,\n",
    "       1.456, 1.498, 1.574, 2.17 , 1.595, 1.567, 1.253, 1.768, 2.03 ,\n",
    "       1.531, 1.653, 2.056, 2.494, 1.45 , 2.169, 1.44 , 1.527, 1.401,\n",
    "       1.411, 1.284, 2.307, 1.91 , 1.981, 2.205, 1.449, 1.258, 2.575,\n",
    "       0.539, 2.208, 1.108, 1.595, 2.159, 1.838, 1.9  , 1.718, 3.389,\n",
    "       3.26 , 2.016, 2.607, 2.724, 3.746, 3.192, 1.247, 2.581, 2.068,\n",
    "       3.992, 3.397, 3.881, 1.598, 3.07 , 3.984, 2.222, 3.838, 2.846,\n",
    "       2.484, 0.97 , 0.623, 0.932, 0.796, 0.834, 0.834, 0.924, 0.795,\n",
    "       1.25 , 0.984, 1.013, 1.012, 0.795, 0.918, 1.082, 0.964, 0.625,\n",
    "       0.888, 1.12 , 1.331, 1.014, 1.448, 0.966, 0.779, 0.836, 1.1  ,\n",
    "       1.174, 1.207, 0.804, 0.958, 1.366, 0.901, 0.696, 1.08 , 1.104,\n",
    "       0.972, 1.39 , 1.354, 0.795, 0.78 , 1.587, 1.209, 1.139, 1.69 ,\n",
    "       1.245, 1.416, 1.3  , 1.12 , 1.59 , 1.407, 1.516, 1.646, 1.676,\n",
    "       1.37 , 1.37 , 1.351, 1.152, 1.452, 0.99 , 1.162, 1.182, 1.112,\n",
    "       1.1  , 1.28 , 1.28 , 1.039, 1.159, 1.917, 1.52 , 1.204, 1.12 ,\n",
    "       1.436, 1.451, 1.638, 1.   , 1.152, 1.154, 1.353, 1.329, 1.356,\n",
    "       1.505, 1.009, 1.144, 0.93 , 1.766, 1.94 , 1.776, 1.258, 1.872,\n",
    "       1.112, 1.856, 1.939, 0.998, 1.758, 2.142, 0.95 , 1.739, 1.516,\n",
    "       0.988, 1.555, 1.212, 1.871, 1.302, 0.756, 2.026, 1.375, 1.25 ,\n",
    "       1.058, 1.187, 1.324, 1.936, 1.427, 1.678, 1.798, 2.652, 1.816,\n",
    "       3.076, 1.844, 1.306, 2.447, 1.176, 1.182, 1.16 , 1.424, 1.574,\n",
    "       1.83 , 1.724, 1.255, 2.175, 1.904, 1.808, 2.711, 1.713, 1.457,\n",
    "       2.724, 1.468, 2.55 , 1.928, 1.922, 1.343, 1.51 , 1.559, 2.992,\n",
    "       2.109, 1.524, 1.248, 1.876, 1.851, 2.218, 1.394, 1.41 , 3.468,\n",
    "       2.346, 2.347, 1.659, 2.442, 2.155, 1.81 , 2.789, 1.606, 2.166,\n",
    "       1.871, 1.8  , 1.683, 1.596, 1.179, 1.639, 3.281, 1.697, 2.085,\n",
    "       1.939, 1.788, 1.691, 2.002, 4.303, 4.246, 2.274, 3.056, 2.503,\n",
    "       1.905, 1.32 , 3.037, 3.741, 2.66 , 3.357, 2.896, 3.788, 0.838,\n",
    "       0.904, 1.032, 0.904, 1.08 , 0.99 , 0.9  , 0.861, 0.906, 1.011,\n",
    "       1.089, 0.832, 0.8  , 1.292, 0.81 , 1.064, 0.911, 0.846, 1.32 ,\n",
    "       1.41 , 1.115, 1.169, 1.164, 1.341, 1.219, 1.127, 1.272, 1.253,\n",
    "       1.12 , 1.118, 1.89 , 1.26 , 1.4  , 1.264, 1.06 , 1.132, 1.466,\n",
    "       1.092, 1.628, 0.96 , 1.075, 1.428, 1.358, 1.41 , 1.711, 1.483,\n",
    "       1.14 , 1.549, 1.41 , 1.24 , 1.712, 1.58 , 1.669, 1.029, 1.103,\n",
    "       2.161, 1.65 , 1.2  , 1.17 , 1.199, 1.695, 1.157, 1.41 , 1.174,\n",
    "       1.593, 1.093, 1.77 , 1.436, 1.124, 1.139, 1.638, 1.328, 1.273,\n",
    "       1.082, 1.578, 0.796, 1.386, 1.452, 1.513, 1.578, 1.736, 1.473,\n",
    "       1.15 , 1.127, 1.144, 0.972, 2.306, 1.479, 1.43 , 1.8  , 1.953,\n",
    "       1.12 , 1.232, 0.984, 2.329, 1.351, 1.376, 1.566, 1.115, 1.032,\n",
    "       1.419, 1.261, 1.637, 1.338, 2.254, 1.441, 1.991, 2.126, 1.094,\n",
    "       1.462, 2.258, 1.074, 2.111, 1.686, 1.915, 2.367, 1.962, 1.406,\n",
    "       1.789, 1.876, 1.235, 2.504, 1.676, 1.367, 1.899, 1.636, 1.828,\n",
    "       1.438, 1.451, 1.52 , 1.506, 2.605, 1.196, 1.621, 1.811, 1.54 ,\n",
    "       1.543, 2.494, 1.65 , 2.214, 2.28 , 1.443, 1.582, 1.857, 1.735,\n",
    "       2.096, 1.72 , 2.16 , 1.382, 1.721, 1.328, 1.982, 1.144, 1.623,\n",
    "       1.457, 2.555, 1.577, 2.592, 1.401, 1.502, 1.327, 1.8  , 2.169,\n",
    "       2.457, 2.004, 2.212, 3.134, 1.36 , 1.276, 2.962, 1.888, 1.548,\n",
    "       2.109, 2.484, 2.258, 2.212, 1.616, 2.372, 2.606, 2.877, 2.96 ,\n",
    "       2.172, 2.1  , 1.795, 2.295, 2.577, 1.727, 1.485, 1.655, 2.049,\n",
    "       2.875, 2.199, 1.304, 2.334, 2.278, 1.493, 2.787, 2.824, 3.261,\n",
    "       2.053, 2.379, 3.173, 1.348, 1.252, 3.229, 3.863, 2.356, 3.579,\n",
    "       1.512, 0.611, 0.876, 0.933, 0.864, 1.011, 1.158, 1.092, 0.956,\n",
    "       1.139, 1.058, 1.04 , 1.354, 1.051, 0.682, 1.161, 1.004, 1.229,\n",
    "       1.249, 1.161, 1.01 , 1.462, 1.269, 1.188, 1.57 , 1.093, 0.962,\n",
    "       1.089, 1.127, 1.309, 0.97 , 1.144, 1.   , 1.206, 1.285, 1.543,\n",
    "       0.884, 1.019, 1.392, 0.924, 1.217, 1.67 , 1.302, 1.488, 1.373,\n",
    "       1.381, 1.265, 0.881, 1.608, 1.344, 1.202, 1.104, 1.232, 1.638,\n",
    "       1.177, 1.582, 0.904, 1.34 , 1.204, 1.477, 1.497, 0.96 , 1.428,\n",
    "       1.039, 1.529, 1.892, 1.887, 1.294, 1.638, 1.677, 1.073, 1.231,\n",
    "       1.175, 1.416, 1.358, 1.609, 1.968, 1.089, 1.296, 1.189, 0.795,\n",
    "       1.371, 1.31 , 1.262, 1.74 , 1.517, 1.45 , 1.416, 0.888, 1.882,\n",
    "       1.302, 1.418, 1.319, 1.77 , 1.627, 1.04 , 0.96 , 1.456, 1.45 ,\n",
    "       1.358, 1.329, 1.715, 1.262, 2.28 , 1.477, 1.216, 1.685, 1.362]), 'condo': np.array([1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,\n",
    "       0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1,\n",
    "       0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0,\n",
    "       0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n",
    "       0, 0, 0, 0])})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "#All imports for the notebook in one place (stdlib, then third-party)\n",
    "import numpy as np\n",
    "import numpy.linalg as npl\n",
    "import math\n",
    "import matplotlib.pyplot as plt"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 8.1 Linear and Affine Functions"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "#f(x) = A x : a linear function R^3 -> R^2 defined by matrix-vector multiplication\n",
    "A = np.array([[-.1,2.8,-1.6],[2.3,-.6,-3.6]])\n",
    "f = lambda x: np.matmul(A,x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "#test vectors and scalar coefficients used by the superposition checks below\n",
    "x,y = np.array([1,2,3]),np.array([-3,-1,2])\n",
    "alpha,beta = .5,-1.6"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([ 9.47, 16.75]), array([ 9.47, 16.75]))"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#superposition holds for a matrix-vector product function\n",
    "lhs,rhs  = f((alpha*x)+(beta*y)), alpha*f(x)+beta*f(y)\n",
    "lhs,rhs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "1.7763568394002505e-15"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "npl.norm(lhs-rhs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([ 2.8, -0.6])"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "f([0,1,0]) #selector array for 2nd col of A"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 73,
   "metadata": {},
   "outputs": [],
   "source": [
    "x = np.array([.2,2.3,1.0])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 74,
   "metadata": {},
   "outputs": [],
   "source": [
    "#demeaning via matrix multiplication vs subtracting the mean; both are linear\n",
    "#centering matrix is I - (1/n)*ones(n,n); the original wrote np.identity(n) - (1/n),\n",
    "#which gives the same matrix only via scalar broadcasting -- made explicit here\n",
    "demeanMM = lambda x:np.matmul(np.identity(len(x)) - np.ones((len(x),len(x)))/len(x),x)\n",
    "deameanSub = lambda x:x-np.average(x) #NOTE: 'deamean' name typo kept intentionally; a later cell calls it by this name"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 75,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([-0.96666667,  1.13333333, -0.16666667]),\n",
       " array([-0.96666667,  1.13333333, -0.16666667]))"
      ]
     },
     "execution_count": 75,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "demeanMM(x),deameanSub(x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 82,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([1, 2]), array([-1,  2]))"
      ]
     },
     "execution_count": 82,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#superposition does not hold for componentwise absolute value\n",
    "f = lambda x: np.abs(x)\n",
    "x,y,alpha,beta = np.array([1,0]),np.array([0,1]),-1,2\n",
    "f(alpha*x + beta*y), alpha*f(x)+beta*f(y) "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 90,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([ 2, -1]), array([1, 0]))"
      ]
     },
     "execution_count": 90,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#superposition does not hold for sorting\n",
    "f = lambda x: np.flip(np.sort(x)) #J: sort(x,rev=true)\n",
    "f(alpha*x + beta*y), alpha*f(x)+beta*f(y) "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 8.2 Linear Function Models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 95,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "77.51999999999998"
      ]
     },
     "execution_count": 95,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#Estimation model for price elasticity of demand\n",
    "prices,demand,costs = np.array([10,20,15]),np.array([5.6,1.5,8.6]),np.array([6.5,11.2,9.8])\n",
    "profit = np.inner((prices-costs),demand)\n",
    "profit"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 104,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([[-0.3 ,  0.1 , -0.1 ],\n",
       "        [ 0.1 , -0.5 ,  0.05],\n",
       "        [-0.1 ,  0.05, -0.4 ]]), array([ 9, 21, 14]))"
      ]
     },
     "execution_count": 104,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#known elasticity, proposed new prices\n",
    "elasticity, newPrices = np.array([[-.3,.1,-.1],[.1,-.5,.05],[-.1,.05,-.4]]), np.array([9,21,14])\n",
    "elasticity, newPrices"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 105,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([-0.1       ,  0.05      , -0.06666667]),\n",
       " array([ 0.04166667, -0.03833333,  0.03916667]),\n",
       " array([5.83333333, 1.4425    , 8.93683333]))"
      ]
     },
     "execution_count": 105,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "changedPrices = (newPrices - prices)/prices #fractional price Δs\n",
    "changedDemand = np.matmul(elasticity,changedPrices) #predicted demand Δs\n",
    "changedPrices,changedDemand,newDemand"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 106,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([5.83333333, 1.4425    , 8.93683333]), 66.25453333333333)"
      ]
     },
     "execution_count": 106,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "newDemand = demand*(1+changedDemand) #predicted new demand\n",
    "newProfit = np.inner(newPrices-costs,newDemand) #predicted new price\n",
    "newDemand,newProfit\n",
    "#if we trust elasticity model, \n",
    "#we find proposed new prices project profit will go down (77.5->66.2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 123,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(array([[0.90553851],\n",
       "        [1.27279221]]), array([[0.9       ],\n",
       "        [1.27279221]]), array([[0.70710678],\n",
       "        [0.70710678]]), array([[0.5       ],\n",
       "        [0.70710678]]))"
      ]
     },
     "execution_count": 123,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#a taylor approximation of a distance function:\n",
    "x,y,z = np.array([.1,.1]),np.array([.5,.5]), np.array([0,0])\n",
    "a,b = np.array([1,0]),np.array([1,1])\n",
    "\n",
    "f = lambda x: np.array([[npl.norm(x-a)],[npl.norm(x-b)]])\n",
    "df = lambda z: np.array([[np.divide((z-a),npl.norm(z-a))],\n",
    "                         [np.divide((z-b),npl.norm(z-b))]])\n",
    "fhat = lambda x: f(z) + np.matmul(df(z),(x-z))\n",
    "\n",
    "f(x), fhat(x),f(y), fhat(y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 192,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(774, (774, 2), array([[0.941, 2.   ],\n",
       "        [1.146, 3.   ],\n",
       "        [0.909, 3.   ],\n",
       "        ...,\n",
       "        [1.216, 3.   ],\n",
       "        [1.685, 4.   ],\n",
       "        [1.362, 3.   ]]))"
      ]
     },
     "execution_count": 192,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#revisiting house sales regression model from ch2\n",
    "price = D[\"price\"]\n",
    "area = D[\"area\"]\n",
    "beds = D[\"beds\"]\n",
    "X = np.array([area,beds]).transpose() #note: in Julia you tranpose with a simple ' at the end of brackets\n",
    "len(price), np.shape(X),X"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 193,
   "metadata": {},
   "outputs": [],
   "source": [
    "#parameters for regression model:\n",
    "beta, v = np.array([148.73,-18.85]), 54.40\n",
    "#a beta to each feature (area,beds), and an offset/scalar to each price"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 217,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(74.84571862623024, 112.78216159756509)"
      ]
     },
     "execution_count": 217,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "rms = lambda x: npl.norm(x) / np.sqrt(len(x))\n",
    "priceHat = np.inner(beta,X) + v\n",
    "# priceHat = np.array([(beta[0]*area) + (beta[1]*beds)])+v\n",
    "#priceHat from Ch2 not working due to mismatched shapes\n",
    "rd = price - priceHat\n",
    "rms(rd), np.std(price)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 8.3 Systems of Linear Equations"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 255,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([0, 0])"
      ]
     },
     "execution_count": 255,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "#balancing chemical reactions\n",
    "reactions = np.array([[2],[1]])\n",
    "products = np.array([[2,0],[0,2]])\n",
    "coeff = np.array([2,2,1]) #balancing coeffs\n",
    "np.matmul(np.block([reactions,-products]),coeff) \n",
    "#J: [R -P]*coeff\n",
    "#build block matrix with negative products, and matrix multiply with coefficients"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
