{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "使用GPR算法建立Sepal.Length、Sepal.Width、Petal.Length对Petal.Width回归问题的高斯过程回归模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "# Load the base data (iris dataset fetched from a remote CSV)\n",
    "iris = pd.read_csv(\"http://image.cador.cn/data/iris.csv\")\n",
    "x,y = iris.drop(columns=['Species','Petal.Width']),iris['Petal.Width']\n",
    "\n",
    "# Standardize each feature column to zero mean and unit variance\n",
    "x = x.apply(lambda v:(v-np.mean(v))/np.std(v))\n",
    "x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.33, random_state=1)\n",
    "\n",
    "# Initialize parameters\n",
    "n = x_train.shape[0]   # number of training samples\n",
    "epsilon = 1e-3         # stopping tolerance on the gradient components\n",
    "theta1 = 1             # kernel length-scale parameter (enters as exp(2*theta1))\n",
    "theta2 = 1             # kernel signal parameter (enters as exp(2*theta2))\n",
    "theta3 = 1             # noise parameter (enters as exp(2*theta3))\n",
    "learnRate = 0.005      # gradient-descent step size"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "进行迭代求取最优超参"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 ---delta1: -15.43597735905513 delta2: 28.94230890212499 delta3: 47.040787150700105\n",
      "1 ---delta1: -11.202121918475905 delta2: 20.269730245089814 delta3: 46.90575619288189\n",
      "2 ---delta1: -9.326096699821788 delta2: 15.674590488714756 delta3: 46.591193943104486\n",
      "3 ---delta1: -8.217168550831605 delta2: 12.588401264999412 delta3: 46.12465918227182\n",
      "4 ---delta1: -7.424331779759415 delta2: 10.194781115577216 delta3: 45.476895158353685\n",
      "5 ---delta1: -6.772508024353025 delta2: 8.164291391288439 delta3: 44.581220614061664\n",
      "6 ---delta1: -6.184085450864703 delta2: 6.363263446252443 delta3: 43.32860502198186\n",
      "7 ---delta1: -5.617944097305971 delta2: 4.750734611577924 delta3: 41.56226203925314\n",
      "8 ---delta1: -5.038472004133996 delta2: 3.322759104646913 delta3: 39.08619560434109\n",
      "9 ---delta1: -4.402313025138282 delta2: 2.076985098144359 delta3: 35.705227454231334\n",
      "10 ---delta1: -3.6685662796210785 delta2: 1.0003234571120814 delta3: 31.312385183222588\n",
      "11 ---delta1: -2.8317359033229046 delta2: 0.08175075587269731 delta3: 26.01527033285342\n",
      "12 ---delta1: -1.9533133874734574 delta2: -0.6649129641803064 delta3: 20.23092983565916\n",
      "13 ---delta1: -1.152538396714272 delta2: -1.1979582619659865 delta3: 14.62362548337762\n",
      "14 ---delta1: -0.5448997490162739 delta2: -1.4817814764810535 delta3: 9.841616086914122\n",
      "15 ---delta1: -0.1765062427380535 delta2: -1.5307366422308526 delta3: 6.231598393130227\n",
      "16 ---delta1: -0.013499760365725422 delta2: -1.4138290277487933 delta3: 3.7703161385959945\n",
      "17 ---delta1: 0.017114135024517196 delta2: -1.2175964194557594 delta3: 2.21347673237576\n",
      "18 ---delta1: -0.016307584163257616 delta2: -1.0092750051186847 delta3: 1.276062937458434\n",
      "19 ---delta1: -0.0688682390763411 delta2: -0.8252786804986663 delta3: 0.7279036411599904\n",
      "20 ---delta1: -0.11794837995470786 delta2: -0.6778859672264659 delta3: 0.4123685805057917\n",
      "21 ---delta1: -0.15531992898468872 delta2: -0.5660518346379888 delta3: 0.2320773391272155\n",
      "22 ---delta1: -0.18007385140856869 delta2: -0.4834548565186445 delta3: 0.129335772968723\n",
      "23 ---delta1: -0.19421032766803137 delta2: -0.42287511677189826 delta3: 0.07080384775977677\n",
      "24 ---delta1: -0.20040549635782767 delta2: -0.37803743472157336 delta3: 0.03744278961931258\n",
      "25 ---delta1: -0.20108118955124965 delta2: -0.3441082285846875 delta3: 0.018430020457387286\n",
      "26 ---delta1: -0.1981288036704143 delta2: -0.317608551882298 delta3: 0.007616305922333311\n",
      "27 ---delta1: -0.1929134870943443 delta2: -0.29613902087155 delta3: 0.001501444422466136\n",
      "28 ---delta1: -0.1863726722882575 delta2: -0.2780912033939593 delta3: -0.0019129047161428048\n",
      "29 ---delta1: -0.1791277029179028 delta2: -0.2624074672329648 delta3: -0.0037717266251817705\n",
      "30 ---delta1: -0.17157900331175924 delta2: -0.24840102025205368 delta3: -0.00473353314177416\n",
      "31 ---delta1: -0.16397815520810255 delta2: -0.23562887417568845 delta3: -0.0051784492615638555\n",
      "32 ---delta1: -0.15647904114049282 delta2: -0.2238055477241936 delta3: -0.005326649488878843\n",
      "33 ---delta1: -0.149172670767868 delta2: -0.21274599326078203 delta3: -0.0053060377005920145\n",
      "34 ---delta1: -0.1421102357576558 delta2: -0.20232858309419655 delta3: -0.005191083815503816\n",
      "35 ---delta1: -0.13531806133173063 delta2: -0.1924714614758365 delta3: -0.005025202304885568\n",
      "36 ---delta1: -0.12880715233733042 delta2: -0.18311761401350424 delta3: -0.004833698919931351\n",
      "37 ---delta1: -0.1225792136036965 delta2: -0.17422552960920967 delta3: -0.0046312892881061885\n",
      "38 ---delta1: -0.11663040917168033 delta2: -0.16576340501779185 delta3: -0.004426480636524843\n",
      "39 ---delta1: -0.11095368951198381 delta2: -0.15770557124016804 delta3: -0.004224133534187047\n",
      "40 ---delta1: -0.10554022183529455 delta2: -0.15003030009184926 delta3: -0.004026963980287235\n",
      "41 ---delta1: -0.10038026256539467 delta2: -0.1427184651089668 delta3: -0.0038364264302828133\n",
      "42 ---delta1: -0.09546368577992936 delta2: -0.13575272375293324 delta3: -0.003653234033528463\n",
      "43 ---delta1: -0.0907803002027876 delta2: -0.12911702003636805 delta3: -0.0034776657011192924\n",
      "44 ---delta1: -0.0863200371057502 delta2: -0.12279628030129075 delta3: -0.0033097477094798933\n",
      "45 ---delta1: -0.08207305945535026 delta2: -0.11677622613613892 delta3: -0.003149361175267984\n",
      "46 ---delta1: -0.07802982343373799 delta2: -0.11104325775575674 delta3: -0.0029963057047197594\n",
      "47 ---delta1: -0.07418111083975987 delta2: -0.10558438018370708 delta3: -0.002850337116484525\n",
      "48 ---delta1: -0.07051804427297981 delta2: -0.10038715271083731 delta3: -0.0027111897979708033\n",
      "49 ---delta1: -0.06703209146862577 delta2: -0.09543965554767553 delta3: -0.0025785898765917636\n",
      "50 ---delta1: -0.063715063525418 delta2: -0.09073046291037912 delta3: -0.002452262959955931\n",
      "51 ---delta1: -0.06055910913190843 delta2: -0.08624862374941245 delta3: -0.0023319386999531844\n",
      "52 ---delta1: -0.05755670683887004 delta2: -0.08198364215850873 delta3: -0.0022173533196365725\n",
      "53 ---delta1: -0.05470065563307003 delta2: -0.07792546274129108 delta3: -0.002108251109788739\n",
      "54 ---delta1: -0.051984065106776 delta2: -0.07406445516289395 delta3: -0.002004385100136119\n",
      "55 ---delta1: -0.04940034511252023 delta2: -0.07039139938736838 delta3: -0.0019055174744053716\n",
      "56 ---delta1: -0.04694319528021751 delta2: -0.0668974717559756 delta3: -0.001811419571829731\n",
      "57 ---delta1: -0.04460659453155458 delta2: -0.06357423144439878 delta3: -0.0017218718895932739\n",
      "58 ---delta1: -0.04238479069724832 delta2: -0.06041360654754868 delta3: -0.0016366638586760018\n",
      "59 ---delta1: -0.040272290298155156 delta2: -0.05740788104655081 delta3: -0.0015555936970130801\n",
      "60 ---delta1: -0.038263848448295334 delta2: -0.05454968109335567 delta3: -0.0014784681276580613\n",
      "61 ---delta1: -0.036354459040918385 delta2: -0.051831963086950594 delta3: -0.001405102146506465\n",
      "62 ---delta1: -0.034539345171889835 delta2: -0.049247999984782354 delta3: -0.001335318735641522\n",
      "63 ---delta1: -0.03281394974478857 delta2: -0.04679136955842722 delta3: -0.0012689485928447652\n",
      "64 ---delta1: -0.031173926430984267 delta2: -0.04445594247797047 delta3: -0.0012058298685175828\n",
      "65 ---delta1: -0.029615130867362893 delta2: -0.04223586972836557 delta3: -0.0011458078873616273\n",
      "66 ---delta1: -0.02813361207322629 delta2: -0.04012557211864376 delta3: -0.0010887348742301128\n",
      "67 ---delta1: -0.026725604199580033 delta2: -0.0381197285123811 delta3: -0.0010344697309534467\n",
      "68 ---delta1: -0.025387518602467907 delta2: -0.036213265195911504 delta3: -0.0009828777000180366\n",
      "69 ---delta1: -0.02411593604814044 delta2: -0.034401345544011974 delta3: -0.0009338301817223282\n",
      "70 ---delta1: -0.022907599332690154 delta2: -0.03267935967600266 delta3: -0.0008872044652932232\n",
      "71 ---delta1: -0.021759406111002022 delta2: -0.031042914778623754 delta3: -0.0008428834769134141\n",
      "72 ---delta1: -0.02066840193870334 delta2: -0.02948782556721241 delta3: -0.0008007555612863371\n",
      "73 ---delta1: -0.019631773686707987 delta2: -0.028010105511302896 delta3: -0.0007607142620287277\n",
      "74 ---delta1: -0.018646843186001405 delta2: -0.026605957310728456 delta3: -0.0007226580689518869\n",
      "75 ---delta1: -0.017711061000575512 delta2: -0.025271765268296775 delta3: -0.0006864902464229772\n",
      "76 ---delta1: -0.016822000635620782 delta2: -0.02400408673304355 delta3: -0.0006521185913115346\n",
      "77 ---delta1: -0.01597735284923374 delta2: -0.02279964398131895 delta3: -0.000619455283441539\n",
      "78 ---delta1: -0.01517492021305955 delta2: -0.02165531740647353 delta3: -0.0005884166344642949\n",
      "79 ---delta1: -0.01441261196644561 delta2: -0.020568137697051014 delta3: -0.000558922950794738\n",
      "80 ---delta1: -0.013688439006806163 delta2: -0.019535279158631624 delta3: -0.0005308983286411717\n",
      "81 ---delta1: -0.013000509165035368 delta2: -0.018554053109525626 delta3: -0.000504270515762073\n",
      "82 ---delta1: -0.012347022623909965 delta2: -0.017621901391326134 delta3: -0.00047897069767799394\n",
      "83 ---delta1: -0.011726267549871494 delta2: -0.01673639036038921 delta3: -0.00045493339302993263\n",
      "84 ---delta1: -0.011136615975935626 delta2: -0.015895205062214757 delta3: -0.00043209625350471015\n",
      "85 ---delta1: -0.010576519760089553 delta2: -0.015096143475741286 delta3: -0.000410399966831676\n",
      "86 ---delta1: -0.010044506812942444 delta2: -0.014337111447829898 delta3: -0.00038978808259315656\n",
      "87 ---delta1: -0.009539177435431867 delta2: -0.013616117338031586 delta3: -0.0003702068993618468\n",
      "88 ---delta1: -0.009059200880763285 delta2: -0.012931267375751787 delta3: -0.0003516053004091191\n",
      "89 ---delta1: -0.008603312038836464 delta2: -0.01228076075460649 delta3: -0.00033393469299625167\n",
      "90 ---delta1: -0.008170308219707323 delta2: -0.011662885288373559 delta3: -0.0003171488457525129\n",
      "91 ---delta1: -0.007759046211866405 delta2: -0.011076013212559133 delta3: -0.0003012037723948424\n",
      "92 ---delta1: -0.007368439343768074 delta2: -0.010518597111506267 delta3: -0.00028605765927380844\n",
      "93 ---delta1: -0.00699745477008662 delta2: -0.009989165920647025 delta3: -0.0002716707435581611\n",
      "94 ---delta1: -0.0066451108638219125 delta2: -0.009486321209596582 delta3: -0.00025800520141672223\n",
      "95 ---delta1: -0.006310474656771703 delta2: -0.009008734114537731 delta3: -0.0002450250744558957\n",
      "96 ---delta1: -0.005992659585686511 delta2: -0.00855514121387202 delta3: -0.00023269619055810153\n",
      "97 ---delta1: -0.005690823084830043 delta2: -0.00812434203107415 delta3: -0.00022098604103604202\n",
      "98 ---delta1: -0.0054041645155464835 delta2: -0.007715195318219514 delta3: -0.00020986373336029374\n",
      "99 ---delta1: -0.005131923084078949 delta2: -0.0073266170960373245 delta3: -0.00019929990736500258\n",
      "100 ---delta1: -0.004873375918439393 delta2: -0.006957576878562577 delta3: -0.00018926664972696017\n",
      "101 ---delta1: -0.004627836173682454 delta2: -0.006607095268826413 delta3: -0.00017973742170340756\n",
      "102 ---delta1: -0.004394651209757683 delta2: -0.0062742422364294725 delta3: -0.00017068700686451166\n",
      "103 ---delta1: -0.004173201009628258 delta2: -0.005958133481623662 delta3: -0.0001620914532622919\n",
      "104 ---delta1: -0.003962896492843981 delta2: -0.005657929132613759 delta3: -0.0001539279661315618\n",
      "105 ---delta1: -0.0037631780051512465 delta2: -0.005372831014575752 delta3: -0.0001461749153648384\n",
      "106 ---delta1: -0.0035735138466428396 delta2: -0.005102081163116168 delta3: -0.00013881173876484354\n",
      "107 ---delta1: -0.003393398951786253 delta2: -0.004844959094501888 delta3: -0.00013181889194413543\n",
      "108 ---delta1: -0.0032223534947313226 delta2: -0.0046007803333907304 delta3: -0.00012517779109799676\n",
      "109 ---delta1: -0.003059921656575426 delta2: -0.004368894842166071 delta3: -0.00011887080987094123\n",
      "110 ---delta1: -0.002905670453198894 delta2: -0.004148685022453691 delta3: -0.00011288117980967627\n",
      "111 ---delta1: -0.002759188585013561 delta2: -0.003939564100551962 delta3: -0.00010719297950601003\n",
      "112 ---delta1: -0.0026200853796645163 delta2: -0.003740974743845271 delta3: -0.00010179106538288352\n",
      "113 ---delta1: -0.0024879897192207068 delta2: -0.00355238742563202 delta3: -9.666106944195008e-05\n",
      "114 ---delta1: -0.0023625490970466956 delta2: -0.0033732992615291835 delta3: -9.17893321954466e-05\n",
      "115 ---delta1: -0.002243428713995854 delta2: -0.0032032326679214407 delta3: -8.716287699428449e-05\n",
      "116 ---delta1: -0.002130310539310898 delta2: -0.0030417337030996805 delta3: -8.276938746121232e-05\n",
      "117 ---delta1: -0.00202289251761556 delta2: -0.002888371442974602 delta3: -7.859714545332963e-05\n",
      "118 ---delta1: -0.0019208877551974979 delta2: -0.002742736528976053 delta3: -7.463503754934209e-05\n",
      "119 ---delta1: -0.0018240237711815155 delta2: -0.0026044399925204686 delta3: -7.087249647241833e-05\n",
      "120 ---delta1: -0.0017320417507633579 delta2: -0.0024731126293211503 delta3: -6.72994790136272e-05\n",
      "121 ---delta1: -0.001644695898654902 delta2: -0.002348403688319678 delta3: -6.390646356635443e-05\n",
      "122 ---delta1: -0.0015617527732949554 delta2: -0.002229979739187371 delta3: -6.0684389303844455e-05\n",
      "123 ---delta1: -0.0014829906347628707 delta2: -0.002117524839853502 delta3: -5.762465440284359e-05\n",
      "124 ---delta1: -0.0014081989472458645 delta2: -0.0020107379996758823 delta3: -5.471909997822877e-05\n",
      "125 ---delta1: -0.0013371777025490417 delta2: -0.0019093338245212976 delta3: -5.195995296247702e-05\n",
      "126 ---delta1: -0.0012697369647831636 delta2: -0.001813041486247613 delta3: -4.9339858605890186e-05\n",
      "127 ---delta1: -0.0012056963685722621 delta2: -0.0017216033934346342 delta3: -4.685181045971376e-05\n",
      "128 ---delta1: -0.0011448846254573652 delta2: -0.0016347749772265274 delta3: -4.4489153125937264e-05\n",
      "129 ---delta1: -0.001087139029252171 delta2: -0.0015523241082888717 delta3: -4.224558847454318e-05\n",
      "130 ---delta1: -0.0010323050881666518 delta2: -0.0014740301523445964 delta3: -4.0115114032346355e-05\n",
      "131 ---delta1: -0.0009802360870097004 delta2: -0.0013996838534353628 delta3: -3.8092026130698287e-05\n",
      "132 ---delta1: -0.0009307926798527433 delta2: -0.0013290861329409154 delta3: -3.617093219787648e-05\n",
      "133 ---delta1: -0.0008838425519108739 delta2: -0.001262048207419042 delta3: -3.4346682994623734e-05\n",
      "134 ---delta1: -0.0008392600441702314 delta2: -0.001198390502793245 delta3: -3.2614391166418955e-05\n",
      "135 ---delta1: -0.0007969257868261082 delta2: -0.0011379429463538315 delta3: -3.096944919178668e-05\n",
      "136 ---delta1: -0.0007567264970802512 delta2: -0.0010805435653793438 delta3: -2.9407441928697153e-05\n",
      "137 ---delta1: -0.0007185545398513682 delta2: -0.0010260387680993688 delta3: -2.792418677444175e-05\n",
      "138 ---delta1: -0.0006823077010231771 delta2: -0.0009742826524217207 delta3: -2.651572209799724e-05\n"
     ]
    }
   ],
   "source": [
    "def delta(bgc,delta,y):\n",
    "    \"\"\"Gradient of the (negative log marginal likelihood) w.r.t. one hyper-parameter.\n",
    "\n",
    "    bgc   : covariance matrix C over the training inputs\n",
    "    delta : element-wise derivative matrix dC/dtheta for that hyper-parameter\n",
    "    y     : training target vector\n",
    "    Returns the scalar 0.5 * (tr(C^-1 dC) - y^T C^-1 dC C^-1 y).\n",
    "    \"\"\"\n",
    "    bgc_inv = np.linalg.inv(bgc)\n",
    "    a = np.sum(np.diag(np.matmul(bgc_inv,delta)))\n",
    "    b = np.matmul(np.matmul(y,np.matmul(np.matmul(bgc_inv,delta),bgc_inv)),y)\n",
    "    return 0.5*(a - b)\n",
    "\n",
    "def bigc(data,t1,t2,t3):\n",
    "    \"\"\"Build the covariance matrix C = K + exp(2*t3)*I with an RBF kernel.\n",
    "\n",
    "    K[i,j] = exp(2*t2) * exp(-||x_i - x_j||^2 / (2*exp(2*t1)))\n",
    "    \"\"\"\n",
    "    rows = data.shape[0]\n",
    "    tmp = np.zeros((rows,rows))\n",
    "    for e in range(rows):\n",
    "        x_tmp = data.iloc[e,:]\n",
    "        tmp[e,:] = np.exp(2*t2)*np.exp(-np.sum((data - x_tmp)**2,axis=1)/(2*np.exp(2*t1)))\n",
    "    return tmp + np.identity(rows)*np.exp(2*t3)\n",
    "\n",
    "for i in range(1000):\n",
    "    bigC = bigc(x_train, theta1, theta2, theta3)\n",
    "    # Update theta1 (length-scale): build dC/dtheta1 row by row\n",
    "    delta1 = np.zeros((n,n))\n",
    "    for j in range(n):\n",
    "        xi = x_train.iloc[j,:]\n",
    "        deltaX = (x_train - xi)**2\n",
    "        rsobj = np.sum(deltaX,axis=1)\n",
    "        # NOTE(review): the second np.exp(2*theta2) factor looks like it should be the\n",
    "        # constant 2 in the analytic RBF-kernel derivative — confirm against the math;\n",
    "        # a positive scale factor still yields a descent direction, so training converges\n",
    "        delta1[j,:]=np.exp(2*theta2)*np.exp(2*theta2)*np.exp(-rsobj/(2*np.exp(2*theta1)))*rsobj/(2*np.exp(2*theta1))\n",
    "    \n",
    "    delta1 = delta(bigC,delta1,y_train)\n",
    "    theta1=theta1-learnRate*delta1\n",
    "    \n",
    "    # Update theta2 (signal parameter)\n",
    "    delta2 = np.zeros((n,n))\n",
    "    for j in range(n):\n",
    "        xi = x_train.iloc[j,:]\n",
    "        deltaX = (x_train - xi)**2\n",
    "        # NOTE(review): as above, the doubled exp(2*theta2) factor may be unintended\n",
    "        delta2[j,:] = 2*np.exp(2*theta2)*np.exp(2*theta2)*np.exp(-np.sum(deltaX,axis=1)/(2*np.exp(2*theta1)))\n",
    "    \n",
    "    delta2 = delta(bigC,delta2,y_train)\n",
    "    theta2=theta2-learnRate*delta2\n",
    "    \n",
    "    # Update theta3 (noise parameter); dC/dtheta3 used here omits the analytic factor 2 — confirm\n",
    "    delta3 = np.identity(n)*np.exp(2*theta3)\n",
    "    delta3 = delta(bigC,delta3,y_train)\n",
    "    theta3=theta3-learnRate*delta3\n",
    "    print(i,\"---delta1:\",delta1,\"delta2:\",delta2,\"delta3:\",delta3)\n",
    "    \n",
    "    # Stop when the largest absolute gradient component falls below the tolerance\n",
    "    if np.max(np.abs([delta1,delta2,delta3])) < epsilon :\n",
    "        break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(1.4767280756922787, 0.5247171125076914, -1.7670980634788682)"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# The three optimized hyper-parameters are\n",
    "theta1,theta2,theta3"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "基于这些超参数，进行GPR预测"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "2.0819543717906575"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Predict on the test set and compute the residual sum of squares.\n",
    "bigC = bigc(x_train, theta1, theta2, theta3)\n",
    "# Invert the covariance matrix ONCE and reuse it; the original recomputed\n",
    "# np.linalg.inv(bigC) inside the loop for every test point (O(tn*n^3)).\n",
    "bigC_inv = np.linalg.inv(bigC)\n",
    "alpha = np.matmul(bigC_inv,y_train)\n",
    "ypred = []\n",
    "ysigma = []\n",
    "tn = x_test.shape[0]\n",
    "for j in range(tn):\n",
    "    xi = x_test.iloc[j,:]\n",
    "    deltaX = (x_train - xi)**2\n",
    "    # Cross-covariance vector k* between the test point and all training points\n",
    "    t0 = np.exp(2*theta2)*np.exp(-np.sum(deltaX,axis=1)/(2*np.exp(2*theta1)))\n",
    "    ypred.append(np.matmul(t0,alpha))\n",
    "    # Predictive std dev: sqrt(k(x*,x*) - k*^T C^-1 k*)\n",
    "    ysigma.append(np.sqrt(np.exp(2*theta2) - np.matmul(np.matmul(t0,bigC_inv),t0)))\n",
    "\n",
    "# The final residual sum of squares on the test set\n",
    "np.sum((y_test.values - ypred)**2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>y_test</th>\n",
       "      <th>ypred</th>\n",
       "      <th>sigma</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.2</td>\n",
       "      <td>0.170740</td>\n",
       "      <td>0.114043</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>98</th>\n",
       "      <td>1.1</td>\n",
       "      <td>0.820464</td>\n",
       "      <td>0.048525</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>75</th>\n",
       "      <td>1.4</td>\n",
       "      <td>1.410814</td>\n",
       "      <td>0.047854</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.4</td>\n",
       "      <td>0.201179</td>\n",
       "      <td>0.067488</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>131</th>\n",
       "      <td>2.0</td>\n",
       "      <td>2.145182</td>\n",
       "      <td>0.151244</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "     y_test     ypred     sigma\n",
       "14      0.2  0.170740  0.114043\n",
       "98      1.1  0.820464  0.048525\n",
       "75      1.4  1.410814  0.047854\n",
       "16      0.4  0.201179  0.067488\n",
       "131     2.0  2.145182  0.151244"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Compare the first few test targets with the predictions and their uncertainty\n",
    "pd.DataFrame({'y_test':y_test,'ypred':ypred,'sigma':ysigma}).head()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
