{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"name":"langevin_heteroskedastic.ipynb","version":"0.3.2","provenance":[],"collapsed_sections":[]},"kernelspec":{"name":"python3","display_name":"Python 3"},"accelerator":"GPU"},"cells":[{"cell_type":"code","metadata":{"id":"xAYPKSFrG8AF","colab_type":"code","outputId":"6b92485f-13ed-4ee3-96bd-9fda963bd6cf","executionInfo":{"status":"ok","timestamp":1558131802559,"user_tz":-60,"elapsed":11540,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}},"colab":{"base_uri":"https://localhost:8080/","height":258}},"source":["!pip3 install http://download.pytorch.org/whl/cu92/torch-0.4.1-cp36-cp36m-linux_x86_64.whl\n","!pip3 install torchvision\n","!pip3 install GPy\n","import pandas as pd\n","import zipfile\n","import urllib.request\n","import GPy\n","import time\n","import copy\n","import math\n","import matplotlib.pyplot as plt\n","import numpy as np\n","import torch\n","import torch.nn as nn\n","import torch.nn.functional as F\n","from torch.autograd import Variable\n","from torch.optim import Optimizer\n","from torch.optim.sgd import SGD\n","from sklearn.model_selection import KFold\n","\n","from torchvision import datasets, transforms\n","from torchvision.utils import make_grid\n","from tqdm import tqdm, trange\n","from google.colab import files\n","%config InlineBackend.figure_format = 'svg'"],"execution_count":2,"outputs":[{"output_type":"stream","text":["Requirement already satisfied: torch==0.4.1 from http://download.pytorch.org/whl/cu92/torch-0.4.1-cp36-cp36m-linux_x86_64.whl in /usr/local/lib/python3.6/dist-packages (0.4.1)\n","Requirement already satisfied: torchvision in /usr/local/lib/python3.6/dist-packages (0.2.2.post3)\n","Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from torchvision) (1.16.3)\n","Requirement already satisfied: pillow>=4.1.1 in /usr/local/lib/python3.6/dist-packages (from torchvision) (4.3.0)\n","Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (from torchvision) (0.4.1)\n","Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from torchvision) (1.12.0)\n","Requirement already satisfied: olefile in /usr/local/lib/python3.6/dist-packages (from pillow>=4.1.1->torchvision) (0.46)\n","Requirement already satisfied: GPy in /usr/local/lib/python3.6/dist-packages (1.9.6)\n","Requirement already satisfied: numpy>=1.7 in /usr/local/lib/python3.6/dist-packages (from GPy) (1.16.3)\n","Requirement already satisfied: scipy>=0.16 in /usr/local/lib/python3.6/dist-packages (from GPy) (1.2.1)\n","Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from GPy) (1.12.0)\n","Requirement already satisfied: paramz>=0.9.0 in /usr/local/lib/python3.6/dist-packages (from GPy) (0.9.5)\n","Requirement already satisfied: decorator>=4.0.10 in /usr/local/lib/python3.6/dist-packages (from paramz>=0.9.0->GPy) (4.4.0)\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"1-7dNcVmHA3I","colab_type":"code","outputId":"1207e43d-b845-495c-b483-cdeaf19dc7cc","executionInfo":{"status":"ok","timestamp":1558131802560,"user_tz":-60,"elapsed":11530,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}},"colab":{"base_uri":"https://localhost:8080/","height":34}},"source":["torch.cuda.device(0)\n","torch.cuda.get_device_name(torch.cuda.current_device())"],"execution_count":3,"outputs":[{"output_type":"execute_result","data":{"text/plain":["'Tesla T4'"]},"metadata":{"tags":[]},"execution_count":3}]},{"cell_type":"code","metadata":{"id":"_1S5kt0omQ-N","colab_type":"code","colab":{}},"source":["def to_variable(var=(), cuda=True, volatile=False):\n","    out = []\n","    for v in var:\n","        \n","        if isinstance(v, np.ndarray):\n","            v = torch.from_numpy(v).type(torch.FloatTensor)\n","\n","        if not v.is_cuda and cuda:\n","            v = v.cuda()\n","\n","        if not isinstance(v, Variable):\n","            v = Variable(v, volatile=volatile)\n","\n","        out.append(v)\n","    return out"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"CK1r-6p04Kv8","colab_type":"code","colab":{}},"source":["class Langevin_SGD(Optimizer):\n","\n","    def __init__(self, params, lr, weight_decay=0, nesterov=False):\n","        if lr < 0.0:\n","            raise ValueError(\"Invalid learning rate: {}\".format(lr))\n","        if weight_decay < 0.0:\n","            raise ValueError(\"Invalid weight_decay value: {}\".format(weight_decay))\n","\n","        defaults = dict(lr=lr, weight_decay=weight_decay)\n","        \n","        super(Langevin_SGD, self).__init__(params, defaults)\n","\n","    def __setstate__(self, state):\n","        super(SGD, self).__setstate__(state)\n","        for group in self.param_groups:\n","            group.setdefault('nesterov', False)\n","\n","    def step(self, closure=None):\n","        \n","        loss = None\n","        if closure is not None:\n","            loss = closure()\n","\n","        for group in self.param_groups:\n","            \n","            weight_decay = group['weight_decay']\n","            \n","            for p in group['params']:\n","                if p.grad is None:\n","                    continue\n","                d_p = p.grad.data\n","                \n","                if len(p.shape) == 1 and p.shape[0] == 1:\n","                    p.data.add_(-group['lr'], d_p)\n","                    \n","                else:\n","                    if weight_decay != 0:\n","                        d_p.add_(weight_decay, p.data)\n","\n","                    unit_noise = Variable(p.data.new(p.size()).normal_())\n","\n","                    p.data.add_(-group['lr'], 0.5*d_p + unit_noise/group['lr']**0.5)\n","\n","        return loss"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"Va8V78eFFsc9","colab_type":"code","colab":{}},"source":["def log_gaussian_loss(output, target, sigma, no_dim):\n","    exponent = -0.5*(target - output)**2/sigma**2\n","    log_coeff = -no_dim*torch.log(sigma)\n","    \n","    return - (log_coeff + exponent).sum()\n","\n","\n","def get_kl_divergence(weights, prior, varpost):\n","    prior_loglik = prior.loglik(weights)\n","    \n","    varpost_loglik = varpost.loglik(weights)\n","    varpost_lik = varpost_loglik.exp()\n","    \n","    return (varpost_lik*(varpost_loglik - prior_loglik)).sum()\n","\n","\n","class gaussian:\n","    def __init__(self, mu, sigma):\n","        self.mu = mu\n","        self.sigma = sigma\n","        \n","    def loglik(self, weights):\n","        exponent = -0.5*(weights - self.mu)**2/self.sigma**2\n","        log_coeff = -0.5*(np.log(2*np.pi) + 2*np.log(self.sigma))\n","        \n","        return (exponent + log_coeff).sum()"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"colab_type":"code","id":"ASGi2Ecx5G-F","colab":{}},"source":["class Langevin_Layer(nn.Module):\n","    def __init__(self, input_dim, output_dim):\n","        super(Langevin_Layer, self).__init__()\n","        self.input_dim = input_dim\n","        self.output_dim = output_dim\n","        \n","        self.weights = nn.Parameter(torch.Tensor(self.input_dim, self.output_dim).uniform_(-0.01, 0.01))\n","        self.biases = nn.Parameter(torch.Tensor(self.output_dim).uniform_(-0.01, 0.01))\n","        \n","    def forward(self, x):\n","        \n","        return torch.mm(x, self.weights) + self.biases"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"colab_type":"code","id":"_8dV-QIq5G-I","colab":{}},"source":["class Langevin_Model(nn.Module):\n","    def __init__(self, input_dim, output_dim, num_units):\n","        super(Langevin_Model, self).__init__()\n","        \n","        self.input_dim = input_dim\n","        self.output_dim = output_dim\n","        \n","        # network with two hidden and one output layer\n","        self.layer1 = Langevin_Layer(input_dim, num_units)\n","        self.layer2 = Langevin_Layer(num_units, 2*output_dim)\n","        \n","        # activation to be used between hidden layers\n","        self.activation = nn.ReLU(inplace = True)\n","    \n","    def forward(self, x):\n","        \n","        x = x.view(-1, self.input_dim)\n","        \n","        x = self.layer1(x)\n","        x = self.activation(x)\n","        \n","        x = self.layer2(x)\n","        \n","        return x"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"colab_type":"code","id":"oAYelw3B5G-K","colab":{}},"source":["class Langevin_Wrapper:\n","    def __init__(self, network, learn_rate, batch_size, no_batches, weight_decay):\n","        \n","        self.learn_rate = learn_rate\n","        self.batch_size = batch_size\n","        self.no_batches = no_batches\n","        \n","        self.network = network\n","        self.network.cuda()\n","        \n","        self.optimizer = Langevin_SGD(self.network.parameters(), lr=self.learn_rate, weight_decay=weight_decay)\n","        self.loss_func = log_gaussian_loss\n","    \n","    def fit(self, x, y):\n","        x, y = to_variable(var=(x, y), cuda=True)\n","        \n","        # reset gradient and total loss\n","        self.optimizer.zero_grad()\n","        \n","        output = self.network(x)\n","        loss = self.loss_func(output[:, :1], y, output[:, 1:].exp(), 1)\n","        \n","        loss.backward()\n","        self.optimizer.step()\n","\n","        return loss\n","    \n","    \n","    def test_loss(self, x, y):\n","        x, y = to_variable(var=(x, y), cuda=True)\n","        \n","        output = self.network(x)\n","        loss = self.loss_func(output[:, :1], y, output[:, 1:].exp(), 1)\n","\n","        return loss"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"caJqaR1eGWEY","colab_type":"code","colab":{}},"source":["def eval_ensemble(x, y, ensemble):\n","    \n","    x, y = to_variable(var=(x, y), cuda=True)\n","        \n","    means, stds = [], []\n","    for net in ensemble:\n","        output = net(x)\n","        means.append(output[:, :1, None])\n","        stds.append(output[:, 1:, None].exp())\n","            \n","    means, stds = torch.cat(means, 2), torch.cat(stds, 2)\n","    mean = means.mean(dim=2)\n","    std = (means.var(dim=2) + stds.mean(dim=2)**2)**0.5\n","    loss = log_gaussian_loss(mean, y, std, 1)/len(x)\n","    \n","    rmse = ((mean - y)**2).mean()**0.5\n","\n","    return loss, rmse"],"execution_count":0,"outputs":[]},{"cell_type":"code","metadata":{"id":"ym6HBK-s8GnO","colab_type":"code","outputId":"6ef8be1c-136e-4703-c59a-cf770c37ef99","executionInfo":{"status":"ok","timestamp":1558131812946,"user_tz":-60,"elapsed":21844,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}},"colab":{"base_uri":"https://localhost:8080/","height":1054}},"source":["np.random.seed(2)\n","no_points = 400\n","lengthscale = 1\n","variance = 1.0\n","sig_noise = 0.3\n","x = np.random.uniform(-3, 3, no_points)[:, None]\n","x.sort(axis=0)\n","\n","\n","k = GPy.kern.RBF(input_dim=1, variance=variance, lengthscale=lengthscale)\n","C = k.K(x, x) + np.eye(no_points)*(x + 2)**2*sig_noise**2\n","\n","y = np.random.multivariate_normal(np.zeros((no_points)), C)[:, None]\n","y = (y - y.mean())\n","x_train = x[75:325]\n","y_train = y[75:325]\n","\n","\n","best_net, best_loss = None, float('inf')\n","num_nets, nets, losses = 50, [], []\n","mix_epochs, burnin_epochs = 100, 1000\n","num_epochs = mix_epochs*num_nets + burnin_epochs + 1\n","\n","batch_size, nb_train = len(x_train), len(x_train)\n","\n","net = Langevin_Wrapper(network=Langevin_Model(input_dim=1, output_dim=1, num_units=200),\n","                       learn_rate=1e-4, batch_size=batch_size, no_batches=1, weight_decay=20)\n","\n","for i in range(num_epochs):\n","    \n","    loss = net.fit(x_train, y_train)\n","    \n","    if i % mix_epochs == 0:\n","        print('Epoch: %4d, Train loss = %8.3f' % (i, loss.cpu().data.numpy()))\n","        \n","    if i % 100 == 0 and i > burnin_epochs: nets.append(copy.deepcopy(net.network))"],"execution_count":11,"outputs":[{"output_type":"stream","text":["Epoch:    0, Train loss =  348.689\n","Epoch:  100, Train loss =  164.954\n","Epoch:  200, Train loss =  105.301\n","Epoch:  300, Train loss =   90.521\n","Epoch:  400, Train loss =   64.924\n","Epoch:  500, Train loss =   55.467\n","Epoch:  600, Train loss =   47.499\n","Epoch:  700, Train loss =   29.145\n","Epoch:  800, Train loss =    7.563\n","Epoch:  900, Train loss =   27.644\n","Epoch: 1000, Train loss =    6.976\n","Epoch: 1100, Train loss =   -3.997\n","Epoch: 1200, Train loss =  -16.176\n","Epoch: 1300, Train loss =    8.504\n","Epoch: 1400, Train loss =   15.614\n","Epoch: 1500, Train loss =  -25.763\n","Epoch: 1600, Train loss =   41.449\n","Epoch: 1700, Train loss =   -6.983\n","Epoch: 1800, Train loss =  -15.342\n","Epoch: 1900, Train loss =  -15.076\n","Epoch: 2000, Train loss =    1.618\n","Epoch: 2100, Train loss =  -40.463\n","Epoch: 2200, Train loss =  -32.578\n","Epoch: 2300, Train loss =   -6.959\n","Epoch: 2400, Train loss =   31.176\n","Epoch: 2500, Train loss =  -18.083\n","Epoch: 2600, Train loss =   -9.308\n","Epoch: 2700, Train loss =  -27.050\n","Epoch: 2800, Train loss =  -36.509\n","Epoch: 2900, Train loss =  -40.281\n","Epoch: 3000, Train loss =    8.372\n","Epoch: 3100, Train loss =  -22.434\n","Epoch: 3200, Train loss =   14.589\n","Epoch: 3300, Train loss =   19.695\n","Epoch: 3400, Train loss =  118.175\n","Epoch: 3500, Train loss =    0.920\n","Epoch: 3600, Train loss =   92.758\n","Epoch: 3700, Train loss =   80.426\n","Epoch: 3800, Train loss =   16.125\n","Epoch: 3900, Train loss =   -5.798\n","Epoch: 4000, Train loss =  -15.234\n","Epoch: 4100, Train loss =  -21.683\n","Epoch: 4200, Train loss =    2.323\n","Epoch: 4300, Train loss =    5.709\n","Epoch: 4400, Train loss =  -24.577\n","Epoch: 4500, Train loss =  -22.278\n","Epoch: 4600, Train loss =  -23.983\n","Epoch: 4700, Train loss =   51.867\n","Epoch: 4800, Train loss =  -29.715\n","Epoch: 4900, Train loss =  -25.799\n","Epoch: 5000, Train loss =  -27.255\n","Epoch: 5100, Train loss =    4.598\n","Epoch: 5200, Train loss =   30.416\n","Epoch: 5300, Train loss =  -23.187\n","Epoch: 5400, Train loss =  -18.623\n","Epoch: 5500, Train loss =   31.036\n","Epoch: 5600, Train loss =  -16.511\n","Epoch: 5700, Train loss =   -4.704\n","Epoch: 5800, Train loss =  -23.138\n","Epoch: 5900, Train loss =  -31.130\n","Epoch: 6000, Train loss =  -21.486\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"tvP-HbNH_Uen","colab_type":"code","outputId":"a8abd888-95db-4f72-b25d-11868a057931","executionInfo":{"status":"ok","timestamp":1558131815347,"user_tz":-60,"elapsed":24234,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}},"colab":{"base_uri":"https://localhost:8080/","height":560}},"source":["print(\"Using %d networks for prediction\" % len(nets))\n","samples = []\n","noises = []\n","for network in nets:\n","    preds = network.forward(torch.linspace(-5, 5, 200).cuda()).cpu().data.numpy()\n","    samples.append(preds[:, 0])\n","    noises.append(np.exp(preds[:, 1]))\n","    \n","samples = np.array(samples)\n","means = (samples.mean(axis = 0)).reshape(-1)\n","\n","noises = np.array(noises)\n","aleatoric = (noises**2).mean(axis = 0)**0.5\n","epistemic = samples.var(axis = 0)**0.5\n","total_unc = (aleatoric**2 + epistemic**2)**0.5\n","\n","c = ['#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd',\n","     '#8c564b', '#e377c2', '#7f7f7f', '#bcbd22', '#17becf']\n","\n","plt.figure(figsize = (6, 5))\n","plt.style.use('default')\n","plt.scatter(x_train, y_train, s = 10, marker = 'x', color = 'black', alpha = 0.5)\n","plt.fill_between(np.linspace(-5, 5, 200), means + aleatoric, means + total_unc, color = c[0], alpha = 0.3, label = 'Epistemic + Aleatoric')\n","plt.fill_between(np.linspace(-5, 5, 200), means - total_unc, means - aleatoric, color = c[0], alpha = 0.3)\n","plt.fill_between(np.linspace(-5, 5, 200), means - aleatoric, means + aleatoric, color = c[1], alpha = 0.4, label = 'Aleatoric')\n","plt.plot(np.linspace(-5, 5, 200), means, color = 'black', linewidth = 1)\n","plt.xlim([-5, 5])\n","plt.ylim([-5, 7])\n","plt.xlabel('$x$', fontsize=30)\n","plt.title('SGLD', fontsize=40)\n","plt.tick_params(labelsize=30)\n","plt.xticks(np.arange(-4, 5, 2))\n","plt.yticks(np.arange(-4, 7, 2))\n","plt.gca().set_yticklabels([])\n","plt.gca().yaxis.grid(alpha=0.3)\n","plt.gca().xaxis.grid(alpha=0.3)\n","plt.savefig('sgld_hetero.pdf', bbox_inches = 'tight')\n","\n","files.download(\"sgld_hetero.pdf\")\n","\n","plt.show()"],"execution_count":12,"outputs":[{"output_type":"stream","text":["Using 50 networks for prediction\n"],"name":"stdout"},{"output_type":"display_data","data":{"text/plain":["<Figure size 432x360 with 1 Axes>"],"image/svg+xml":"<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\n  \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n<!-- Created with matplotlib (https://matplotlib.org/) -->\n<svg height=\"391.6625pt\" version=\"1.1\" viewBox=\"0 0 356.2 391.6625\" width=\"356.2pt\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\">\n <defs>\n  <style type=\"text/css\">\n*{stroke-linecap:butt;stroke-linejoin:round;}\n  </style>\n </defs>\n <g id=\"figure_1\">\n  <g id=\"patch_1\">\n   <path d=\"M 0 391.6625 \nL 356.2 391.6625 \nL 356.2 0 \nL 0 0 \nz\n\" style=\"fill:none;\"/>\n  </g>\n  <g id=\"axes_1\">\n   <g id=\"patch_2\">\n    <path d=\"M 10.7 315.39375 \nL 345.5 315.39375 \nL 345.5 43.59375 \nL 10.7 43.59375 \nz\n\" style=\"fill:#ffffff;\"/>\n   </g>\n   <g id=\"PathCollection_1\">\n    <defs>\n     <path d=\"M -1.581139 1.581139 \nL 1.581139 -1.581139 \nM -1.581139 -1.581139 \nL 1.581139 1.581139 \n\" id=\"mce012b09a6\" style=\"stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\"/>\n    </defs>\n    <g clip-path=\"url(#p54966e6dbe)\">\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"115.282121\" xlink:href=\"#mce012b09a6\" y=\"183.745982\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"116.790285\" xlink:href=\"#mce012b09a6\" y=\"181.154163\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"117.226193\" xlink:href=\"#mce012b09a6\" y=\"184.310171\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"117.439686\" xlink:href=\"#mce012b09a6\" y=\"180.988587\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"117.61485\" xlink:href=\"#mce012b09a6\" y=\"183.568276\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"118.00521\" xlink:href=\"#mce012b09a6\" y=\"182.96171\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"118.186179\" xlink:href=\"#mce012b09a6\" y=\"181.213327\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"118.368141\" xlink:href=\"#mce012b09a6\" y=\"182.115454\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"118.769818\" xlink:href=\"#mce012b09a6\" y=\"181.782198\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"118.999834\" xlink:href=\"#mce012b09a6\" y=\"176.162515\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"119.060893\" xlink:href=\"#mce012b09a6\" y=\"180.492926\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"119.252172\" xlink:href=\"#mce012b09a6\" y=\"181.48031\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"119.543981\" xlink:href=\"#mce012b09a6\" y=\"181.751128\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"120.165723\" xlink:href=\"#mce012b09a6\" y=\"176.093525\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"120.776029\" xlink:href=\"#mce012b09a6\" y=\"180.846384\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"120.83401\" xlink:href=\"#mce012b09a6\" y=\"178.993687\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"121.331799\" xlink:href=\"#mce012b09a6\" y=\"176.725288\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"121.61672\" xlink:href=\"#mce012b09a6\" y=\"177.550133\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"121.911832\" xlink:href=\"#mce012b09a6\" y=\"178.08241\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"121.915111\" xlink:href=\"#mce012b09a6\" y=\"179.677994\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"122.02192\" xlink:href=\"#mce012b09a6\" y=\"179.702639\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"122.03062\" xlink:href=\"#mce012b09a6\" y=\"175.781693\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"123.061291\" xlink:href=\"#mce012b09a6\" y=\"174.194523\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"123.583227\" xlink:href=\"#mce012b09a6\" y=\"176.44688\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"123.776831\" xlink:href=\"#mce012b09a6\" y=\"177.609507\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"124.287914\" xlink:href=\"#mce012b09a6\" y=\"175.176823\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"126.874423\" xlink:href=\"#mce012b09a6\" y=\"171.573863\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"127.038547\" xlink:href=\"#mce012b09a6\" y=\"171.20321\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"127.127421\" xlink:href=\"#mce012b09a6\" y=\"170.062427\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"127.312964\" xlink:href=\"#mce012b09a6\" y=\"177.288324\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"127.38802\" xlink:href=\"#mce012b09a6\" y=\"168.852561\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"127.625744\" xlink:href=\"#mce012b09a6\" y=\"166.738285\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"128.137713\" xlink:href=\"#mce012b09a6\" y=\"174.691756\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"128.592776\" xlink:href=\"#mce012b09a6\" y=\"173.46327\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"128.729058\" xlink:href=\"#mce012b09a6\" y=\"175.720468\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"129.784424\" xlink:href=\"#mce012b09a6\" y=\"167.604738\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"129.837526\" xlink:href=\"#mce012b09a6\" y=\"174.137305\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"130.033438\" xlink:href=\"#mce012b09a6\" y=\"171.268401\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"130.647831\" xlink:href=\"#mce012b09a6\" y=\"166.510209\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"130.982285\" xlink:href=\"#mce012b09a6\" y=\"165.631396\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"131.219809\" xlink:href=\"#mce012b09a6\" y=\"169.444306\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"131.260263\" xlink:href=\"#mce012b09a6\" y=\"170.082017\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"131.774822\" xlink:href=\"#mce012b09a6\" y=\"165.410918\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"132.713023\" xlink:href=\"#mce012b09a6\" y=\"168.101764\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"133.960294\" xlink:href=\"#mce012b09a6\" y=\"170.030795\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"134.561016\" xlink:href=\"#mce012b09a6\" y=\"172.194311\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"134.660867\" xlink:href=\"#mce012b09a6\" y=\"166.390854\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"135.032078\" xlink:href=\"#mce012b09a6\" y=\"159.899565\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"135.440171\" xlink:href=\"#mce012b09a6\" y=\"167.325476\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"135.850013\" xlink:href=\"#mce012b09a6\" y=\"161.393727\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"136.000924\" xlink:href=\"#mce012b09a6\" y=\"168.388002\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"136.183921\" xlink:href=\"#mce012b09a6\" y=\"167.815095\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"136.522507\" xlink:href=\"#mce012b09a6\" y=\"166.914173\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"136.954213\" xlink:href=\"#mce012b09a6\" y=\"165.173777\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"137.223481\" xlink:href=\"#mce012b09a6\" y=\"162.354901\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"137.854631\" xlink:href=\"#mce012b09a6\" y=\"175.640001\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"138.046895\" xlink:href=\"#mce012b09a6\" y=\"165.574169\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"139.202535\" xlink:href=\"#mce012b09a6\" y=\"166.582803\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"139.653361\" xlink:href=\"#mce012b09a6\" y=\"155.918683\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"140.240027\" xlink:href=\"#mce012b09a6\" y=\"162.644304\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"140.587373\" xlink:href=\"#mce012b09a6\" y=\"165.95634\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"142.650408\" xlink:href=\"#mce012b09a6\" y=\"167.997065\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"142.65298\" xlink:href=\"#mce012b09a6\" y=\"164.576319\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"142.695017\" xlink:href=\"#mce012b09a6\" y=\"171.364599\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"142.957411\" xlink:href=\"#mce012b09a6\" y=\"153.369613\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"143.265058\" xlink:href=\"#mce012b09a6\" y=\"156.652414\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"143.426944\" xlink:href=\"#mce012b09a6\" y=\"160.592024\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"144.017659\" xlink:href=\"#mce012b09a6\" y=\"168.698984\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"144.100463\" xlink:href=\"#mce012b09a6\" y=\"159.122172\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"144.326791\" xlink:href=\"#mce012b09a6\" y=\"163.735925\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"144.50482\" xlink:href=\"#mce012b09a6\" y=\"163.038622\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"145.430543\" xlink:href=\"#mce012b09a6\" y=\"164.648595\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"146.202257\" xlink:href=\"#mce012b09a6\" y=\"158.323096\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"146.283351\" xlink:href=\"#mce012b09a6\" y=\"161.186012\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"146.959574\" xlink:href=\"#mce012b09a6\" y=\"164.569631\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"147.212271\" xlink:href=\"#mce012b09a6\" y=\"158.832573\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"147.291037\" xlink:href=\"#mce012b09a6\" y=\"160.006427\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"147.76745\" xlink:href=\"#mce012b09a6\" y=\"167.822832\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"147.933104\" xlink:href=\"#mce012b09a6\" y=\"158.119806\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"148.143148\" xlink:href=\"#mce012b09a6\" y=\"151.376866\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"148.613607\" xlink:href=\"#mce012b09a6\" y=\"165.572564\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"149.298686\" xlink:href=\"#mce012b09a6\" y=\"148.108487\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"149.401265\" xlink:href=\"#mce012b09a6\" y=\"154.65154\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"149.478263\" xlink:href=\"#mce012b09a6\" y=\"159.673591\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"151.250862\" xlink:href=\"#mce012b09a6\" y=\"164.295913\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"152.240953\" xlink:href=\"#mce012b09a6\" y=\"160.157799\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"152.566723\" xlink:href=\"#mce012b09a6\" y=\"159.293898\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"153.066623\" xlink:href=\"#mce012b09a6\" y=\"176.356816\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"153.593124\" xlink:href=\"#mce012b09a6\" y=\"158.017565\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"153.835819\" xlink:href=\"#mce012b09a6\" y=\"172.028476\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"154.153614\" xlink:href=\"#mce012b09a6\" y=\"170.0426\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"155.275156\" xlink:href=\"#mce012b09a6\" y=\"181.30107\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"155.37844\" xlink:href=\"#mce012b09a6\" y=\"178.633338\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"155.378996\" xlink:href=\"#mce012b09a6\" y=\"175.783962\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"155.380541\" xlink:href=\"#mce012b09a6\" y=\"167.820451\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"155.505706\" xlink:href=\"#mce012b09a6\" y=\"175.108909\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"155.668571\" xlink:href=\"#mce012b09a6\" y=\"175.215663\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"157.037884\" xlink:href=\"#mce012b09a6\" y=\"191.288518\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"158.541906\" xlink:href=\"#mce012b09a6\" y=\"172.137137\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"159.272531\" xlink:href=\"#mce012b09a6\" y=\"196.848898\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"160.916736\" xlink:href=\"#mce012b09a6\" y=\"179.662366\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"160.920592\" xlink:href=\"#mce012b09a6\" y=\"192.764617\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"160.958559\" xlink:href=\"#mce012b09a6\" y=\"189.346291\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"161.006285\" xlink:href=\"#mce012b09a6\" y=\"179.891789\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"161.377431\" xlink:href=\"#mce012b09a6\" y=\"169.730086\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"161.594945\" xlink:href=\"#mce012b09a6\" y=\"200.2609\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"161.976459\" xlink:href=\"#mce012b09a6\" y=\"169.416664\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"162.103484\" xlink:href=\"#mce012b09a6\" y=\"157.326407\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"163.107782\" xlink:href=\"#mce012b09a6\" y=\"184.28758\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"163.545433\" xlink:href=\"#mce012b09a6\" y=\"162.680204\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"163.661213\" xlink:href=\"#mce012b09a6\" y=\"196.596315\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"163.745172\" xlink:href=\"#mce012b09a6\" y=\"193.33812\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"163.955762\" xlink:href=\"#mce012b09a6\" y=\"182.784141\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"164.518321\" xlink:href=\"#mce012b09a6\" y=\"180.68964\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"165.059604\" xlink:href=\"#mce012b09a6\" y=\"182.782655\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"165.107562\" xlink:href=\"#mce012b09a6\" y=\"196.650664\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"165.242656\" xlink:href=\"#mce012b09a6\" y=\"188.227081\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"165.378245\" xlink:href=\"#mce012b09a6\" y=\"197.454641\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"165.39379\" xlink:href=\"#mce012b09a6\" y=\"187.583212\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"166.319376\" xlink:href=\"#mce012b09a6\" y=\"212.806736\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"166.965411\" xlink:href=\"#mce012b09a6\" y=\"215.323445\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"166.966078\" xlink:href=\"#mce012b09a6\" y=\"205.136809\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"167.657196\" xlink:href=\"#mce012b09a6\" y=\"175.632516\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"168.101636\" xlink:href=\"#mce012b09a6\" y=\"186.316011\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"170.414111\" xlink:href=\"#mce012b09a6\" y=\"189.260035\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"170.757687\" xlink:href=\"#mce012b09a6\" y=\"211.491526\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"170.910977\" xlink:href=\"#mce012b09a6\" y=\"199.685227\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"171.62915\" xlink:href=\"#mce012b09a6\" y=\"178.349507\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"172.356812\" xlink:href=\"#mce012b09a6\" y=\"197.098183\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"172.517261\" xlink:href=\"#mce012b09a6\" y=\"214.894923\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"173.627679\" xlink:href=\"#mce012b09a6\" y=\"221.455055\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"174.375316\" xlink:href=\"#mce012b09a6\" y=\"203.916393\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"174.699069\" xlink:href=\"#mce012b09a6\" y=\"232.52465\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"175.943788\" xlink:href=\"#mce012b09a6\" y=\"199.166009\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"175.950843\" xlink:href=\"#mce012b09a6\" y=\"200.526081\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"176.516885\" xlink:href=\"#mce012b09a6\" y=\"205.169629\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"176.942296\" xlink:href=\"#mce012b09a6\" y=\"221.375549\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"177.442621\" xlink:href=\"#mce012b09a6\" y=\"240.354844\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"177.512185\" xlink:href=\"#mce012b09a6\" y=\"205.81423\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"178.10168\" xlink:href=\"#mce012b09a6\" y=\"186.644896\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"178.344361\" xlink:href=\"#mce012b09a6\" y=\"209.250242\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"178.658196\" xlink:href=\"#mce012b09a6\" y=\"214.547905\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"178.704001\" xlink:href=\"#mce012b09a6\" y=\"212.517677\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"178.903608\" xlink:href=\"#mce012b09a6\" y=\"228.258754\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"179.116839\" xlink:href=\"#mce012b09a6\" y=\"233.612599\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"179.151952\" xlink:href=\"#mce012b09a6\" y=\"199.407735\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"179.153835\" xlink:href=\"#mce012b09a6\" y=\"228.485579\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"179.322723\" xlink:href=\"#mce012b09a6\" y=\"218.177785\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"179.630201\" xlink:href=\"#mce012b09a6\" y=\"244.578682\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"180.827573\" xlink:href=\"#mce012b09a6\" y=\"229.276135\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"180.993086\" xlink:href=\"#mce012b09a6\" y=\"225.244876\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"182.754254\" xlink:href=\"#mce012b09a6\" y=\"241.890866\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"182.934554\" xlink:href=\"#mce012b09a6\" y=\"232.739995\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"183.269295\" xlink:href=\"#mce012b09a6\" y=\"243.679858\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"183.451127\" xlink:href=\"#mce012b09a6\" y=\"225.35709\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"183.954064\" xlink:href=\"#mce012b09a6\" y=\"208.92388\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"184.95035\" xlink:href=\"#mce012b09a6\" y=\"217.664911\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"185.252166\" xlink:href=\"#mce012b09a6\" y=\"267.985189\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"185.400677\" xlink:href=\"#mce012b09a6\" y=\"243.595919\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"186.173672\" xlink:href=\"#mce012b09a6\" y=\"223.372485\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"186.80719\" xlink:href=\"#mce012b09a6\" y=\"226.273183\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"186.980535\" xlink:href=\"#mce012b09a6\" y=\"241.722084\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"187.723715\" xlink:href=\"#mce012b09a6\" y=\"229.501168\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"188.042921\" xlink:href=\"#mce012b09a6\" y=\"269.10302\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"188.076199\" xlink:href=\"#mce012b09a6\" y=\"233.383498\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"188.340442\" xlink:href=\"#mce012b09a6\" y=\"268.892868\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"189.52207\" xlink:href=\"#mce012b09a6\" y=\"237.629011\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"189.942036\" xlink:href=\"#mce012b09a6\" y=\"229.669792\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"190.830993\" xlink:href=\"#mce012b09a6\" y=\"239.188154\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"191.587912\" xlink:href=\"#mce012b09a6\" y=\"218.562259\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"191.98895\" xlink:href=\"#mce012b09a6\" y=\"284.985775\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"192.216683\" xlink:href=\"#mce012b09a6\" y=\"235.375936\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"192.726532\" xlink:href=\"#mce012b09a6\" y=\"233.553427\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"194.171239\" xlink:href=\"#mce012b09a6\" y=\"245.758913\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"194.70974\" xlink:href=\"#mce012b09a6\" y=\"252.135443\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"196.135406\" xlink:href=\"#mce012b09a6\" y=\"241.368148\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"196.431777\" xlink:href=\"#mce012b09a6\" y=\"240.539571\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"196.59795\" xlink:href=\"#mce012b09a6\" y=\"258.046212\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"197.534198\" xlink:href=\"#mce012b09a6\" y=\"278.541079\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"197.588684\" xlink:href=\"#mce012b09a6\" y=\"258.636908\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"198.553023\" xlink:href=\"#mce012b09a6\" y=\"235.23151\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"199.883861\" xlink:href=\"#mce012b09a6\" y=\"287.641248\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"199.981888\" xlink:href=\"#mce012b09a6\" y=\"276.757836\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"200.415619\" xlink:href=\"#mce012b09a6\" y=\"244.422377\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"201.252339\" xlink:href=\"#mce012b09a6\" y=\"243.37309\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"202.059152\" xlink:href=\"#mce012b09a6\" y=\"221.544045\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"202.133577\" xlink:href=\"#mce012b09a6\" y=\"256.876558\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"202.433364\" xlink:href=\"#mce012b09a6\" y=\"226.539705\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"202.885521\" xlink:href=\"#mce012b09a6\" y=\"258.520946\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"202.921373\" xlink:href=\"#mce012b09a6\" y=\"261.899154\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"203.479975\" xlink:href=\"#mce012b09a6\" y=\"229.375199\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"204.01542\" xlink:href=\"#mce012b09a6\" y=\"265.651291\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"204.085781\" xlink:href=\"#mce012b09a6\" y=\"257.576941\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"206.304903\" xlink:href=\"#mce012b09a6\" y=\"239.776319\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"206.411754\" xlink:href=\"#mce012b09a6\" y=\"247.332661\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"206.587147\" xlink:href=\"#mce012b09a6\" y=\"240.68831\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"206.715842\" xlink:href=\"#mce012b09a6\" y=\"241.050691\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"206.8884\" xlink:href=\"#mce012b09a6\" y=\"292.666154\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"206.994968\" xlink:href=\"#mce012b09a6\" y=\"262.811567\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"207.132421\" xlink:href=\"#mce012b09a6\" y=\"252.569244\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"208.815948\" xlink:href=\"#mce012b09a6\" y=\"270.478986\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"208.838761\" xlink:href=\"#mce012b09a6\" y=\"262.642727\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"209.741088\" xlink:href=\"#mce012b09a6\" y=\"300.852771\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"210.538369\" xlink:href=\"#mce012b09a6\" y=\"261.429675\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"210.63609\" xlink:href=\"#mce012b09a6\" y=\"255.302171\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"210.683142\" xlink:href=\"#mce012b09a6\" y=\"234.67324\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"211.867795\" xlink:href=\"#mce012b09a6\" y=\"235.473545\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"211.933625\" xlink:href=\"#mce012b09a6\" y=\"257.130781\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"213.773243\" xlink:href=\"#mce012b09a6\" y=\"245.910672\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"213.941313\" xlink:href=\"#mce012b09a6\" y=\"291.836943\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"216.116917\" xlink:href=\"#mce012b09a6\" y=\"274.84276\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"216.749161\" xlink:href=\"#mce012b09a6\" y=\"228.864738\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"216.936899\" xlink:href=\"#mce012b09a6\" y=\"270.359863\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"217.101363\" xlink:href=\"#mce012b09a6\" y=\"242.042929\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"217.118754\" xlink:href=\"#mce012b09a6\" y=\"234.969813\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"218.179702\" xlink:href=\"#mce012b09a6\" y=\"248.678638\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"218.427131\" xlink:href=\"#mce012b09a6\" y=\"250.480371\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"218.730841\" xlink:href=\"#mce012b09a6\" y=\"273.962201\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"219.205572\" xlink:href=\"#mce012b09a6\" y=\"239.259336\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"219.381448\" xlink:href=\"#mce012b09a6\" y=\"241.50959\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"219.877249\" xlink:href=\"#mce012b09a6\" y=\"245.602955\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"220.888905\" xlink:href=\"#mce012b09a6\" y=\"252.298505\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"220.926001\" xlink:href=\"#mce012b09a6\" y=\"282.320678\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"220.952035\" xlink:href=\"#mce012b09a6\" y=\"248.952264\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"221.43596\" xlink:href=\"#mce012b09a6\" y=\"268.543346\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"221.439274\" xlink:href=\"#mce012b09a6\" y=\"259.222848\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"222.230833\" xlink:href=\"#mce012b09a6\" y=\"255.137645\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"224.101681\" xlink:href=\"#mce012b09a6\" y=\"257.997501\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"224.439039\" xlink:href=\"#mce012b09a6\" y=\"224.457793\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"225.039589\" xlink:href=\"#mce012b09a6\" y=\"254.807236\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"225.657749\" xlink:href=\"#mce012b09a6\" y=\"231.628853\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"225.989744\" xlink:href=\"#mce012b09a6\" y=\"211.816339\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"226.663752\" xlink:href=\"#mce012b09a6\" y=\"226.799433\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"227.993369\" xlink:href=\"#mce012b09a6\" y=\"217.284043\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"228.0745\" xlink:href=\"#mce012b09a6\" y=\"257.777103\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"230.797904\" xlink:href=\"#mce012b09a6\" y=\"234.079812\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"231.325137\" xlink:href=\"#mce012b09a6\" y=\"226.049533\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"232.309408\" xlink:href=\"#mce012b09a6\" y=\"241.547987\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"233.502284\" xlink:href=\"#mce012b09a6\" y=\"185.910504\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"233.655209\" xlink:href=\"#mce012b09a6\" y=\"209.474146\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"233.764451\" xlink:href=\"#mce012b09a6\" y=\"168.329657\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"233.861859\" xlink:href=\"#mce012b09a6\" y=\"184.317828\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"233.946279\" xlink:href=\"#mce012b09a6\" y=\"203.349526\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"234.679745\" xlink:href=\"#mce012b09a6\" y=\"229.016515\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"235.418124\" xlink:href=\"#mce012b09a6\" y=\"238.103036\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"235.511075\" xlink:href=\"#mce012b09a6\" y=\"222.431073\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"236.404175\" xlink:href=\"#mce012b09a6\" y=\"242.873144\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"236.823294\" xlink:href=\"#mce012b09a6\" y=\"200.763065\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"237.085892\" xlink:href=\"#mce012b09a6\" y=\"170.92805\"/>\n     <use style=\"fill-opacity:0.5;stroke:#000000;stroke-opacity:0.5;stroke-width:1.5;\" x=\"237.842704\" xlink:href=\"#mce012b09a6\" y=\"205.72178\"/>\n    </g>\n   </g>\n   <g id=\"PolyCollection_1\">\n    <path clip-path=\"url(#p54966e6dbe)\" d=\"M 10.7 228.320943 \nL 10.7 238.041779 \nL 12.382412 237.043218 \nL 14.064824 236.045056 \nL 15.747236 235.046774 \nL 17.429648 234.048434 \nL 19.11206 233.050608 \nL 20.794472 232.05192 \nL 22.476884 231.052643 \nL 24.159296 230.054182 \nL 25.841709 229.054644 \nL 27.524121 228.054471 \nL 29.206533 227.053537 \nL 30.888945 226.051269 \nL 32.571357 225.047467 \nL 34.253769 224.041265 \nL 35.936181 223.033553 \nL 37.618593 222.024913 \nL 39.301005 221.016372 \nL 40.983417 220.007373 \nL 42.665829 218.998307 \nL 44.348241 217.988256 \nL 46.030653 216.976672 \nL 47.713065 215.964903 \nL 49.395477 214.951426 \nL 51.077889 213.935957 \nL 52.760302 212.919306 \nL 54.442714 211.901312 \nL 56.125126 210.882891 \nL 57.807538 209.862461 \nL 59.48995 208.846234 \nL 61.172362 207.829245 \nL 62.854774 206.811501 \nL 64.537186 205.790404 \nL 66.219598 204.768483 \nL 67.90201 203.744546 \nL 69.584422 202.720107 \nL 71.266834 201.694982 \nL 72.949246 200.668786 \nL 74.631658 199.640317 \nL 76.31407 198.608838 \nL 77.996482 197.571588 \nL 79.678894 196.53246 \nL 81.361307 195.49283 \nL 83.043719 194.452298 \nL 84.726131 193.408487 \nL 86.408543 192.360978 \nL 88.090955 191.308024 \nL 89.773367 190.251129 \nL 91.455779 189.191485 \nL 93.138191 188.129742 \nL 94.820603 187.065803 \nL 96.503015 185.998048 \nL 98.185427 184.928106 \nL 99.867839 183.857891 \nL 101.550251 182.781153 \nL 103.232663 181.697623 \nL 104.915075 180.61162 \nL 106.597487 179.518194 \nL 108.279899 178.425043 \nL 109.962312 177.328383 \nL 111.644724 176.232365 \nL 113.327136 175.139033 \nL 115.009548 174.046817 \nL 116.69196 172.950389 \nL 118.374372 171.849316 \nL 120.056784 170.747155 \nL 121.739196 169.64125 \nL 123.421608 168.533532 \nL 125.10402 167.429238 \nL 126.786432 166.333339 \nL 128.468844 165.24432 \nL 130.151256 164.174981 \nL 131.833668 163.132846 \nL 133.51608 162.122843 \nL 135.198492 161.141535 \nL 136.880905 160.197378 \nL 138.563317 159.314719 \nL 140.245729 158.524119 \nL 141.928141 157.865313 \nL 143.610553 157.390124 \nL 145.292965 157.21241 \nL 146.975377 157.357464 \nL 148.657789 157.855396 \nL 150.340201 158.716935 \nL 152.022613 159.942449 \nL 153.705025 161.458045 \nL 155.387437 163.218647 \nL 157.069849 165.20306 \nL 158.752261 167.351565 \nL 160.434673 169.613946 \nL 162.117085 171.970653 \nL 163.799497 174.413003 \nL 165.48191 176.943463 \nL 167.164322 179.531354 \nL 168.846734 182.174936 \nL 170.529146 184.851977 \nL 172.211558 187.52193 \nL 173.89397 190.16819 \nL 175.576382 192.762157 \nL 177.258794 195.342452 \nL 178.941206 197.868667 \nL 180.623618 200.328984 \nL 182.30603 202.688914 \nL 183.988442 204.946163 \nL 185.670854 207.073972 \nL 187.353266 209.066753 \nL 189.035678 210.931036 \nL 190.71809 212.675939 \nL 192.400503 214.25515 \nL 194.082915 215.670487 \nL 195.765327 216.921815 \nL 197.447739 218.035579 \nL 199.130151 219.001827 \nL 200.812563 219.855779 \nL 202.494975 220.590754 \nL 204.177387 221.230273 \nL 205.859799 221.766951 \nL 207.542211 222.208314 \nL 209.224623 222.560508 \nL 210.907035 222.843215 \nL 212.589447 223.058739 \nL 214.271859 223.204118 \nL 215.954271 223.293372 \nL 217.636683 223.344622 \nL 219.319095 223.340218 \nL 221.001508 223.280268 \nL 222.68392 223.156053 \nL 224.366332 222.97923 \nL 226.048744 222.735755 \nL 227.731156 222.431565 \nL 229.413568 222.070099 \nL 231.09598 221.647912 \nL 232.778392 221.179495 \nL 234.460804 220.666332 \nL 236.143216 220.107187 \nL 237.825628 219.50572 \nL 239.50804 218.844373 \nL 241.190452 218.125099 \nL 242.872864 217.345842 \nL 244.555276 216.495919 \nL 246.237688 215.576225 \nL 247.920101 214.584843 \nL 249.602513 213.521348 \nL 251.284925 212.379415 \nL 252.967337 211.158758 \nL 254.649749 209.866235 \nL 256.332161 208.496161 \nL 258.014573 207.031319 \nL 259.696985 205.485506 \nL 261.379397 203.843263 \nL 263.061809 202.112888 \nL 264.744221 200.285996 \nL 266.426633 198.348913 \nL 268.109045 196.304691 \nL 269.791457 194.146185 \nL 271.473869 191.856784 \nL 273.156281 189.430499 \nL 274.838693 186.865747 \nL 276.521106 184.162223 \nL 278.203518 181.311526 \nL 279.88593 178.305921 \nL 281.568342 175.152091 \nL 283.250754 171.824921 \nL 284.933166 168.295492 \nL 286.615578 164.56272 \nL 288.29799 160.634439 \nL 289.980402 156.501605 \nL 291.662814 152.166778 \nL 293.345226 147.604386 \nL 295.027638 142.807319 \nL 296.71005 137.773168 \nL 298.392462 132.473976 \nL 300.074874 126.902215 \nL 301.757286 121.049159 \nL 303.439698 114.899919 \nL 305.122111 108.440774 \nL 306.804523 101.650576 \nL 308.486935 94.511893 \nL 310.169347 87.023365 \nL 311.851759 79.168543 \nL 313.534171 70.926258 \nL 315.216583 62.279488 \nL 316.898995 53.194429 \nL 318.581407 43.653854 \nL 320.263819 33.608513 \nL 321.946231 23.039247 \nL 323.628643 11.959188 \nL 325.311055 0.336854 \nL 326.993467 -11.864509 \nL 328.675879 -24.669763 \nL 330.358291 -38.109042 \nL 332.040704 -52.245765 \nL 333.723116 -67.079046 \nL 335.405528 -82.636081 \nL 337.08794 -98.961649 \nL 338.770352 -116.097027 \nL 340.452764 -134.105291 \nL 342.135176 -153.029858 \nL 343.817588 -172.896043 \nL 345.5 -193.752232 \nL 345.5 -195.095279 \nL 345.5 -195.095279 \nL 343.817588 -174.267084 \nL 342.135176 -154.428893 \nL 340.452764 -135.532192 \nL 338.770352 -117.55162 \nL 337.08794 -100.443804 \nL 335.405528 -84.14567 \nL 333.723116 -68.615894 \nL 332.040704 -53.809743 \nL 330.358291 -39.699935 \nL 328.675879 -26.287052 \nL 326.993467 -13.507848 \nL 325.311055 -1.332298 \nL 323.628643 10.264568 \nL 321.946231 21.319657 \nL 320.263819 31.864514 \nL 318.581407 41.886267 \nL 316.898995 51.403988 \nL 315.216583 60.467101 \nL 313.534171 69.092788 \nL 311.851759 77.314769 \nL 310.169347 85.150129 \nL 308.486935 92.619994 \nL 306.804523 99.74092 \nL 305.122111 106.514464 \nL 303.439698 112.958079 \nL 301.757286 119.092987 \nL 300.074874 124.932975 \nL 298.392462 130.493017 \nL 296.71005 135.781916 \nL 295.027638 140.807243 \nL 293.345226 145.596988 \nL 291.662814 150.153504 \nL 289.980402 154.483957 \nL 288.29799 158.613832 \nL 286.615578 162.540136 \nL 284.933166 166.272282 \nL 283.250754 169.803126 \nL 281.568342 173.134303 \nL 279.88593 176.294224 \nL 278.203518 179.308664 \nL 276.521106 182.170506 \nL 274.838693 184.887316 \nL 273.156281 187.467824 \nL 271.473869 189.912146 \nL 269.791457 192.222246 \nL 268.109045 194.403703 \nL 266.426633 196.472631 \nL 264.744221 198.436288 \nL 263.061809 200.291888 \nL 261.379397 202.053308 \nL 259.696985 203.72859 \nL 258.014573 205.308872 \nL 256.332161 206.809814 \nL 254.649749 208.218743 \nL 252.967337 209.551858 \nL 251.284925 210.814842 \nL 249.602513 212.001672 \nL 247.920101 213.111452 \nL 246.237688 214.150874 \nL 244.555276 215.11985 \nL 242.872864 216.020082 \nL 241.190452 216.850845 \nL 239.50804 217.622369 \nL 237.825628 218.335736 \nL 236.143216 218.990508 \nL 234.460804 219.605272 \nL 232.778392 220.174516 \nL 231.09598 220.698671 \nL 229.413568 221.177665 \nL 227.731156 221.597156 \nL 226.048744 221.95848 \nL 224.366332 222.257398 \nL 222.68392 222.488437 \nL 221.001508 222.664618 \nL 219.319095 222.773345 \nL 217.636683 222.824442 \nL 215.954271 222.819052 \nL 214.271859 222.776666 \nL 212.589447 222.676415 \nL 210.907035 222.503587 \nL 209.224623 222.257552 \nL 207.542211 221.935599 \nL 205.859799 221.520228 \nL 204.177387 221.004208 \nL 202.494975 220.382701 \nL 200.812563 219.659947 \nL 199.130151 218.811816 \nL 197.447739 217.848432 \nL 195.765327 216.728909 \nL 194.082915 215.466984 \nL 192.400503 214.041415 \nL 190.71809 212.451391 \nL 189.035678 210.707318 \nL 187.353266 208.850127 \nL 185.670854 206.867443 \nL 183.988442 204.75054 \nL 182.30603 202.504446 \nL 180.623618 200.152281 \nL 178.941206 197.696824 \nL 177.258794 195.175282 \nL 175.576382 192.597015 \nL 173.89397 190.00553 \nL 172.211558 187.360935 \nL 170.529146 184.690984 \nL 168.846734 182.012556 \nL 167.164322 179.364027 \nL 165.48191 176.765055 \nL 163.799497 174.216029 \nL 162.117085 171.749783 \nL 160.434673 169.362301 \nL 158.752261 167.068031 \nL 157.069849 164.887719 \nL 155.387437 162.869585 \nL 153.705025 161.075197 \nL 152.022613 159.522493 \nL 150.340201 158.251115 \nL 148.657789 157.315521 \nL 146.975377 156.732468 \nL 145.292965 156.502534 \nL 143.610553 156.591783 \nL 141.928141 157.005145 \nL 140.245729 157.612306 \nL 138.563317 158.359937 \nL 136.880905 159.198448 \nL 135.198492 160.095026 \nL 133.51608 161.0247 \nL 131.833668 161.982438 \nL 130.151256 162.971497 \nL 128.468844 163.98715 \nL 126.786432 165.013619 \nL 125.10402 166.043544 \nL 123.421608 167.079809 \nL 121.739196 168.118474 \nL 120.056784 169.152088 \nL 118.374372 170.179923 \nL 116.69196 171.202798 \nL 115.009548 172.218767 \nL 113.327136 173.228709 \nL 111.644724 174.236852 \nL 109.962312 175.243597 \nL 108.279899 176.2489 \nL 106.597487 177.251757 \nL 104.915075 178.253706 \nL 103.232663 179.249246 \nL 101.550251 180.239473 \nL 99.867839 181.220739 \nL 98.185427 182.195077 \nL 96.503015 183.166629 \nL 94.820603 184.132125 \nL 93.138191 185.091537 \nL 91.455779 186.048138 \nL 89.773367 187.000136 \nL 88.090955 187.946205 \nL 86.408543 188.886109 \nL 84.726131 189.82029 \nL 83.043719 190.74925 \nL 81.361307 191.673665 \nL 79.678894 192.594905 \nL 77.996482 193.513276 \nL 76.31407 194.427921 \nL 74.631658 195.335685 \nL 72.949246 196.23949 \nL 71.266834 197.140083 \nL 69.584422 198.037677 \nL 67.90201 198.934143 \nL 66.219598 199.829637 \nL 64.537186 200.721266 \nL 62.854774 201.609499 \nL 61.172362 202.493648 \nL 59.48995 203.375263 \nL 57.807538 204.252758 \nL 56.125126 205.132905 \nL 54.442714 206.011434 \nL 52.760302 206.888165 \nL 51.077889 207.76255 \nL 49.395477 208.634387 \nL 47.713065 209.504134 \nL 46.030653 210.372396 \nL 44.348241 211.240357 \nL 42.665829 212.105327 \nL 40.983417 212.969042 \nL 39.301005 213.831719 \nL 37.618593 214.693587 \nL 35.936181 215.555065 \nL 34.253769 216.414906 \nL 32.571357 217.273434 \nL 30.888945 218.13101 \nL 29.206533 218.985996 \nL 27.524121 219.839279 \nL 25.841709 220.692355 \nL 24.159296 221.543748 \nL 22.476884 222.393362 \nL 20.794472 223.242493 \nL 19.11206 224.090383 \nL 17.429648 224.937063 \nL 15.747236 225.783816 \nL 14.064824 226.629847 \nL 12.382412 227.475258 \nL 10.7 228.320943 \nz\n\" style=\"fill:#1f77b4;fill-opacity:0.3;stroke:#1f77b4;stroke-opacity:0.3;\"/>\n   </g>\n   <g id=\"PolyCollection_2\">\n    <path clip-path=\"url(#p54966e6dbe)\" d=\"M 10.7 250.436622 \nL 10.7 260.157455 \nL 12.382412 258.977521 \nL 14.064824 257.799083 \nL 15.747236 256.62193 \nL 17.429648 255.4463 \nL 19.11206 254.272581 \nL 20.794472 253.099516 \nL 22.476884 251.927682 \nL 24.159296 250.759164 \nL 25.841709 249.591434 \nL 27.524121 248.425298 \nL 29.206533 247.259442 \nL 30.888945 246.09455 \nL 32.571357 244.930465 \nL 34.253769 243.765087 \nL 35.936181 242.600295 \nL 37.618593 241.436843 \nL 39.301005 240.275555 \nL 40.983417 239.115524 \nL 42.665829 237.957857 \nL 44.348241 236.801095 \nL 46.030653 235.646115 \nL 47.713065 234.492704 \nL 49.395477 233.339898 \nL 51.077889 232.187267 \nL 52.760302 231.036796 \nL 54.442714 229.887859 \nL 56.125126 228.742033 \nL 57.807538 227.59551 \nL 59.48995 226.446378 \nL 61.172362 225.296226 \nL 62.854774 224.147923 \nL 64.537186 222.999467 \nL 66.219598 221.854326 \nL 67.90201 220.710671 \nL 69.584422 219.57012 \nL 71.266834 218.431756 \nL 72.949246 217.296787 \nL 74.631658 216.164129 \nL 76.31407 215.03342 \nL 77.996482 213.905406 \nL 79.678894 212.780258 \nL 81.361307 211.660068 \nL 83.043719 210.544292 \nL 84.726131 209.431723 \nL 86.408543 208.32079 \nL 88.090955 207.208728 \nL 89.773367 206.098553 \nL 91.455779 204.991902 \nL 93.138191 203.889103 \nL 94.820603 202.789427 \nL 96.503015 201.688559 \nL 98.185427 200.592335 \nL 99.867839 199.501282 \nL 101.550251 198.416144 \nL 103.232663 197.337742 \nL 104.915075 196.263719 \nL 106.597487 195.200749 \nL 108.279899 194.143688 \nL 109.962312 193.082254 \nL 111.644724 192.026129 \nL 113.327136 190.974596 \nL 115.009548 189.925398 \nL 116.69196 188.883167 \nL 118.374372 187.852533 \nL 120.056784 186.822997 \nL 121.739196 185.799656 \nL 123.421608 184.782821 \nL 125.10402 183.77365 \nL 126.786432 182.777913 \nL 128.468844 181.802341 \nL 130.151256 180.8585 \nL 131.833668 179.951741 \nL 133.51608 179.097215 \nL 135.198492 178.286658 \nL 136.880905 177.528811 \nL 138.563317 176.859542 \nL 140.245729 176.304995 \nL 141.928141 175.890339 \nL 143.610553 175.675072 \nL 145.292965 175.77198 \nL 146.975377 176.215068 \nL 148.657789 177.047949 \nL 150.340201 178.313581 \nL 152.022613 180.048258 \nL 153.705025 182.141745 \nL 155.387437 184.540165 \nL 157.069849 187.251333 \nL 158.752261 190.146272 \nL 160.434673 193.179269 \nL 162.117085 196.331464 \nL 163.799497 199.598481 \nL 165.48191 202.941053 \nL 167.164322 206.382476 \nL 168.846734 209.880206 \nL 170.529146 213.417739 \nL 172.211558 216.968792 \nL 173.89397 220.517867 \nL 175.576382 224.045747 \nL 177.258794 227.564078 \nL 178.941206 231.012031 \nL 180.623618 234.368811 \nL 182.30603 237.663132 \nL 183.988442 240.879458 \nL 185.670854 243.982099 \nL 187.353266 246.965618 \nL 189.035678 249.78623 \nL 190.71809 252.490208 \nL 192.400503 255.018465 \nL 194.082915 257.387216 \nL 195.765327 259.593406 \nL 197.447739 261.683417 \nL 199.130151 263.661811 \nL 200.812563 265.554531 \nL 202.494975 267.378334 \nL 204.177387 269.166874 \nL 205.859799 270.938111 \nL 207.542211 272.686658 \nL 209.224623 274.426974 \nL 210.907035 276.162765 \nL 212.589447 277.904755 \nL 214.271859 279.667601 \nL 215.954271 281.452847 \nL 217.636683 283.264661 \nL 219.319095 285.096576 \nL 221.001508 286.958461 \nL 222.68392 288.846461 \nL 224.366332 290.775368 \nL 226.048744 292.753157 \nL 227.731156 294.782513 \nL 229.413568 296.862101 \nL 231.09598 298.984026 \nL 232.778392 301.154359 \nL 234.460804 303.364977 \nL 236.143216 305.613429 \nL 237.825628 307.896863 \nL 239.50804 310.228823 \nL 241.190452 312.610907 \nL 242.872864 315.038937 \nL 244.555276 317.535246 \nL 246.237688 320.09882 \nL 247.920101 322.716633 \nL 249.602513 325.396257 \nL 251.284925 328.154226 \nL 252.967337 330.988457 \nL 254.649749 333.887533 \nL 256.332161 336.86556 \nL 258.014573 339.937755 \nL 259.696985 343.088016 \nL 261.379397 346.335663 \nL 263.061809 349.678947 \nL 264.744221 353.120256 \nL 266.426633 356.668941 \nL 268.109045 360.322844 \nL 269.791457 364.087839 \nL 271.473869 367.981683 \nL 273.156281 372.008446 \nL 274.838693 376.165981 \nL 276.521106 380.451824 \nL 278.203518 384.882542 \nL 279.88593 389.468916 \nL 281.568342 394.201202 \nL 283.250754 399.103594 \nL 284.933166 404.208321 \nL 286.615578 409.514236 \nL 288.29799 415.01283 \nL 289.980402 420.719438 \nL 291.662814 426.626674 \nL 293.345226 432.76128 \nL 295.027638 439.130167 \nL 296.71005 445.735214 \nL 298.392462 452.603272 \nL 300.074874 459.74216 \nL 301.757286 467.158034 \nL 303.439698 474.869492 \nL 305.122111 482.8874 \nL 306.804523 491.234978 \nL 308.486935 499.929679 \nL 310.169347 508.972433 \nL 311.851759 518.380931 \nL 313.534171 528.176297 \nL 315.216583 538.374387 \nL 316.898995 549.008359 \nL 318.581407 560.093526 \nL 320.263819 571.683178 \nL 321.946231 583.795092 \nL 323.628643 596.41592 \nL 325.311055 609.577715 \nL 326.993467 623.319145 \nL 328.675879 637.662587 \nL 330.358291 652.638757 \nL 332.040704 668.309996 \nL 333.723116 684.676172 \nL 335.405528 701.764245 \nL 337.08794 719.619619 \nL 338.770352 738.284847 \nL 340.452764 757.821535 \nL 342.135176 778.27297 \nL 343.817588 799.662266 \nL 345.5 822.041479 \nL 345.5 820.698433 \nL 345.5 820.698433 \nL 343.817588 798.291225 \nL 342.135176 776.873935 \nL 340.452764 756.394635 \nL 338.770352 736.830255 \nL 337.08794 718.137464 \nL 335.405528 700.254657 \nL 333.723116 683.139324 \nL 332.040704 666.746017 \nL 330.358291 651.047864 \nL 328.675879 636.045298 \nL 326.993467 621.675762 \nL 325.311055 607.908584 \nL 323.628643 594.7213 \nL 321.946231 582.075502 \nL 320.263819 569.939179 \nL 318.581407 558.325939 \nL 316.898995 547.217918 \nL 315.216583 536.562 \nL 313.534171 526.342828 \nL 311.851759 516.527157 \nL 310.169347 507.099197 \nL 308.486935 498.037758 \nL 306.804523 489.325323 \nL 305.122111 480.961091 \nL 303.439698 472.927652 \nL 301.757286 465.201851 \nL 300.074874 457.772909 \nL 298.392462 450.622313 \nL 296.71005 443.743952 \nL 295.027638 437.130091 \nL 293.345226 430.753881 \nL 291.662814 424.613401 \nL 289.980402 418.701801 \nL 288.29799 412.992212 \nL 286.615578 407.491652 \nL 284.933166 402.18511 \nL 283.250754 397.08181 \nL 281.568342 392.183414 \nL 279.88593 387.457219 \nL 278.203518 382.879691 \nL 276.521106 378.460107 \nL 274.838693 374.187549 \nL 273.156281 370.045762 \nL 271.473869 366.037045 \nL 269.791457 362.163895 \nL 268.109045 358.421851 \nL 266.426633 354.792659 \nL 264.744221 351.270542 \nL 263.061809 347.857952 \nL 261.379397 344.545708 \nL 259.696985 341.331099 \nL 258.014573 338.215303 \nL 256.332161 335.179213 \nL 254.649749 332.240035 \nL 252.967337 329.381558 \nL 251.284925 326.589653 \nL 249.602513 323.876581 \nL 247.920101 321.243247 \nL 246.237688 318.673474 \nL 244.555276 316.159182 \nL 242.872864 313.713181 \nL 241.190452 311.336648 \nL 239.50804 309.006827 \nL 237.825628 306.726881 \nL 236.143216 304.496747 \nL 234.460804 302.303928 \nL 232.778392 300.149374 \nL 231.09598 298.034782 \nL 229.413568 295.969667 \nL 227.731156 293.948098 \nL 226.048744 291.975887 \nL 224.366332 290.053537 \nL 222.68392 288.178842 \nL 221.001508 286.342813 \nL 219.319095 284.529708 \nL 217.636683 282.744484 \nL 215.954271 280.978527 \nL 214.271859 279.240144 \nL 212.589447 277.522433 \nL 210.907035 275.823137 \nL 209.224623 274.124024 \nL 207.542211 272.413948 \nL 205.859799 270.691388 \nL 204.177387 268.940811 \nL 202.494975 267.170276 \nL 200.812563 265.358699 \nL 199.130151 263.4718 \nL 197.447739 261.496268 \nL 195.765327 259.400501 \nL 194.082915 257.183711 \nL 192.400503 254.804726 \nL 190.71809 252.265658 \nL 189.035678 249.562511 \nL 187.353266 246.74899 \nL 185.670854 243.775572 \nL 183.988442 240.683836 \nL 182.30603 237.478661 \nL 180.623618 234.192109 \nL 178.941206 230.840189 \nL 177.258794 227.396907 \nL 175.576382 223.880604 \nL 173.89397 220.355206 \nL 172.211558 216.807797 \nL 170.529146 213.256746 \nL 168.846734 209.717826 \nL 167.164322 206.215149 \nL 165.48191 202.762646 \nL 163.799497 199.401506 \nL 162.117085 196.110595 \nL 160.434673 192.927625 \nL 158.752261 189.862737 \nL 157.069849 186.93599 \nL 155.387437 184.191103 \nL 153.705025 181.758896 \nL 152.022613 179.628302 \nL 150.340201 177.847761 \nL 148.657789 176.508074 \nL 146.975377 175.590073 \nL 145.292965 175.062108 \nL 143.610553 174.87673 \nL 141.928141 175.030171 \nL 140.245729 175.393182 \nL 138.563317 175.90476 \nL 136.880905 176.52988 \nL 135.198492 177.240149 \nL 133.51608 177.999072 \nL 131.833668 178.801331 \nL 130.151256 179.655016 \nL 128.468844 180.545171 \nL 126.786432 181.458193 \nL 125.10402 182.387954 \nL 123.421608 183.329099 \nL 121.739196 184.276879 \nL 120.056784 185.227929 \nL 118.374372 186.18314 \nL 116.69196 187.135577 \nL 115.009548 188.097348 \nL 113.327136 189.064272 \nL 111.644724 190.030614 \nL 109.962312 190.99747 \nL 108.279899 191.967543 \nL 106.597487 192.934311 \nL 104.915075 193.905805 \nL 103.232663 194.889366 \nL 101.550251 195.874465 \nL 99.867839 196.86413 \nL 98.185427 197.859305 \nL 96.503015 198.857139 \nL 94.820603 199.855748 \nL 93.138191 200.850897 \nL 91.455779 201.848556 \nL 89.773367 202.847561 \nL 88.090955 203.84691 \nL 86.408543 204.845922 \nL 84.726131 205.843527 \nL 83.043719 206.841243 \nL 81.361307 207.840904 \nL 79.678894 208.842704 \nL 77.996482 209.847094 \nL 76.31407 210.852503 \nL 74.631658 211.859497 \nL 72.949246 212.86749 \nL 71.266834 213.876857 \nL 69.584422 214.887691 \nL 67.90201 215.900268 \nL 66.219598 216.91548 \nL 64.537186 217.930328 \nL 62.854774 218.945921 \nL 61.172362 219.960631 \nL 59.48995 220.975405 \nL 57.807538 221.985807 \nL 56.125126 222.992049 \nL 54.442714 223.997982 \nL 52.760302 225.005656 \nL 51.077889 226.013859 \nL 49.395477 227.022858 \nL 47.713065 228.031936 \nL 46.030653 229.041837 \nL 44.348241 230.053197 \nL 42.665829 231.064877 \nL 40.983417 232.077193 \nL 39.301005 233.090901 \nL 37.618593 234.105517 \nL 35.936181 235.121809 \nL 34.253769 236.138728 \nL 32.571357 237.156433 \nL 30.888945 238.174291 \nL 29.206533 239.191901 \nL 27.524121 240.210105 \nL 25.841709 241.229146 \nL 24.159296 242.24873 \nL 22.476884 243.268398 \nL 20.794472 244.29009 \nL 19.11206 245.312355 \nL 17.429648 246.334931 \nL 15.747236 247.35897 \nL 14.064824 248.383867 \nL 12.382412 249.409562 \nL 10.7 250.436622 \nz\n\" style=\"fill:#1f77b4;fill-opacity:0.3;stroke:#1f77b4;stroke-opacity:0.3;\"/>\n   </g>\n   <g id=\"PolyCollection_3\">\n    <path clip-path=\"url(#p54966e6dbe)\" d=\"M 10.7 238.041779 \nL 10.7 250.436622 \nL 12.382412 249.409562 \nL 14.064824 248.383867 \nL 15.747236 247.35897 \nL 17.429648 246.334931 \nL 19.11206 245.312355 \nL 20.794472 244.29009 \nL 22.476884 243.268398 \nL 24.159296 242.24873 \nL 25.841709 241.229146 \nL 27.524121 240.210105 \nL 29.206533 239.191901 \nL 30.888945 238.174291 \nL 32.571357 237.156433 \nL 34.253769 236.138728 \nL 35.936181 235.121809 \nL 37.618593 234.105517 \nL 39.301005 233.090901 \nL 40.983417 232.077193 \nL 42.665829 231.064877 \nL 44.348241 230.053197 \nL 46.030653 229.041837 \nL 47.713065 228.031936 \nL 49.395477 227.022858 \nL 51.077889 226.013859 \nL 52.760302 225.005656 \nL 54.442714 223.997982 \nL 56.125126 222.992049 \nL 57.807538 221.985807 \nL 59.48995 220.975405 \nL 61.172362 219.960631 \nL 62.854774 218.945921 \nL 64.537186 217.930328 \nL 66.219598 216.91548 \nL 67.90201 215.900268 \nL 69.584422 214.887691 \nL 71.266834 213.876857 \nL 72.949246 212.86749 \nL 74.631658 211.859497 \nL 76.31407 210.852503 \nL 77.996482 209.847094 \nL 79.678894 208.842704 \nL 81.361307 207.840904 \nL 83.043719 206.841243 \nL 84.726131 205.843527 \nL 86.408543 204.845922 \nL 88.090955 203.84691 \nL 89.773367 202.847561 \nL 91.455779 201.848556 \nL 93.138191 200.850897 \nL 94.820603 199.855748 \nL 96.503015 198.857139 \nL 98.185427 197.859305 \nL 99.867839 196.86413 \nL 101.550251 195.874465 \nL 103.232663 194.889366 \nL 104.915075 193.905805 \nL 106.597487 192.934311 \nL 108.279899 191.967543 \nL 109.962312 190.99747 \nL 111.644724 190.030614 \nL 113.327136 189.064272 \nL 115.009548 188.097348 \nL 116.69196 187.135577 \nL 118.374372 186.18314 \nL 120.056784 185.227929 \nL 121.739196 184.276879 \nL 123.421608 183.329099 \nL 125.10402 182.387954 \nL 126.786432 181.458193 \nL 128.468844 180.545171 \nL 130.151256 179.655016 \nL 131.833668 178.801331 \nL 133.51608 177.999072 \nL 135.198492 177.240149 \nL 136.880905 176.52988 \nL 138.563317 175.90476 \nL 140.245729 175.393182 \nL 141.928141 175.030171 \nL 143.610553 174.87673 \nL 145.292965 175.062108 \nL 146.975377 175.590073 \nL 148.657789 176.508074 \nL 150.340201 177.847761 \nL 152.022613 179.628302 \nL 153.705025 181.758896 \nL 155.387437 184.191103 \nL 157.069849 186.93599 \nL 158.752261 189.862737 \nL 160.434673 192.927625 \nL 162.117085 196.110595 \nL 163.799497 199.401506 \nL 165.48191 202.762646 \nL 167.164322 206.215149 \nL 168.846734 209.717826 \nL 170.529146 213.256746 \nL 172.211558 216.807797 \nL 173.89397 220.355206 \nL 175.576382 223.880604 \nL 177.258794 227.396907 \nL 178.941206 230.840189 \nL 180.623618 234.192109 \nL 182.30603 237.478661 \nL 183.988442 240.683836 \nL 185.670854 243.775572 \nL 187.353266 246.74899 \nL 189.035678 249.562511 \nL 190.71809 252.265658 \nL 192.400503 254.804726 \nL 194.082915 257.183711 \nL 195.765327 259.400501 \nL 197.447739 261.496268 \nL 199.130151 263.4718 \nL 200.812563 265.358699 \nL 202.494975 267.170276 \nL 204.177387 268.940811 \nL 205.859799 270.691388 \nL 207.542211 272.413948 \nL 209.224623 274.124024 \nL 210.907035 275.823137 \nL 212.589447 277.522433 \nL 214.271859 279.240144 \nL 215.954271 280.978527 \nL 217.636683 282.744484 \nL 219.319095 284.529708 \nL 221.001508 286.342813 \nL 222.68392 288.178842 \nL 224.366332 290.053537 \nL 226.048744 291.975887 \nL 227.731156 293.948098 \nL 229.413568 295.969667 \nL 231.09598 298.034782 \nL 232.778392 300.149374 \nL 234.460804 302.303928 \nL 236.143216 304.496747 \nL 237.825628 306.726881 \nL 239.50804 309.006827 \nL 241.190452 311.336648 \nL 242.872864 313.713181 \nL 244.555276 316.159182 \nL 246.237688 318.673474 \nL 247.920101 321.243247 \nL 249.602513 323.876581 \nL 251.284925 326.589653 \nL 252.967337 329.381558 \nL 254.649749 332.240035 \nL 256.332161 335.179213 \nL 258.014573 338.215303 \nL 259.696985 341.331099 \nL 261.379397 344.545708 \nL 263.061809 347.857952 \nL 264.744221 351.270542 \nL 266.426633 354.792659 \nL 268.109045 358.421851 \nL 269.791457 362.163895 \nL 271.473869 366.037045 \nL 273.156281 370.045762 \nL 274.838693 374.187549 \nL 276.521106 378.460107 \nL 278.203518 382.879691 \nL 279.88593 387.457219 \nL 281.568342 392.183414 \nL 283.250754 397.08181 \nL 284.933166 402.18511 \nL 286.615578 407.491652 \nL 288.29799 412.992212 \nL 289.980402 418.701801 \nL 291.662814 424.613401 \nL 293.345226 430.753881 \nL 295.027638 437.130091 \nL 296.71005 443.743952 \nL 298.392462 450.622313 \nL 300.074874 457.772909 \nL 301.757286 465.201851 \nL 303.439698 472.927652 \nL 305.122111 480.961091 \nL 306.804523 489.325323 \nL 308.486935 498.037758 \nL 310.169347 507.099197 \nL 311.851759 516.527157 \nL 313.534171 526.342828 \nL 315.216583 536.562 \nL 316.898995 547.217918 \nL 318.581407 558.325939 \nL 320.263819 569.939179 \nL 321.946231 582.075502 \nL 323.628643 594.7213 \nL 325.311055 607.908584 \nL 326.993467 621.675762 \nL 328.675879 636.045298 \nL 330.358291 651.047864 \nL 332.040704 666.746017 \nL 333.723116 683.139324 \nL 335.405528 700.254657 \nL 337.08794 718.137464 \nL 338.770352 736.830255 \nL 340.452764 756.394635 \nL 342.135176 776.873935 \nL 343.817588 798.291225 \nL 345.5 820.698433 \nL 345.5 -193.752232 \nL 345.5 -193.752232 \nL 343.817588 -172.896043 \nL 342.135176 -153.029858 \nL 340.452764 -134.105291 \nL 338.770352 -116.097027 \nL 337.08794 -98.961649 \nL 335.405528 -82.636081 \nL 333.723116 -67.079046 \nL 332.040704 -52.245765 \nL 330.358291 -38.109042 \nL 328.675879 -24.669763 \nL 326.993467 -11.864509 \nL 325.311055 0.336854 \nL 323.628643 11.959188 \nL 321.946231 23.039247 \nL 320.263819 33.608513 \nL 318.581407 43.653854 \nL 316.898995 53.194429 \nL 315.216583 62.279488 \nL 313.534171 70.926258 \nL 311.851759 79.168543 \nL 310.169347 87.023365 \nL 308.486935 94.511893 \nL 306.804523 101.650576 \nL 305.122111 108.440774 \nL 303.439698 114.899919 \nL 301.757286 121.049159 \nL 300.074874 126.902215 \nL 298.392462 132.473976 \nL 296.71005 137.773168 \nL 295.027638 142.807319 \nL 293.345226 147.604386 \nL 291.662814 152.166778 \nL 289.980402 156.501605 \nL 288.29799 160.634439 \nL 286.615578 164.56272 \nL 284.933166 168.295492 \nL 283.250754 171.824921 \nL 281.568342 175.152091 \nL 279.88593 178.305921 \nL 278.203518 181.311526 \nL 276.521106 184.162223 \nL 274.838693 186.865747 \nL 273.156281 189.430499 \nL 271.473869 191.856784 \nL 269.791457 194.146185 \nL 268.109045 196.304691 \nL 266.426633 198.348913 \nL 264.744221 200.285996 \nL 263.061809 202.112888 \nL 261.379397 203.843263 \nL 259.696985 205.485506 \nL 258.014573 207.031319 \nL 256.332161 208.496161 \nL 254.649749 209.866235 \nL 252.967337 211.158758 \nL 251.284925 212.379415 \nL 249.602513 213.521348 \nL 247.920101 214.584843 \nL 246.237688 215.576225 \nL 244.555276 216.495919 \nL 242.872864 217.345842 \nL 241.190452 218.125099 \nL 239.50804 218.844373 \nL 237.825628 219.50572 \nL 236.143216 220.107187 \nL 234.460804 220.666332 \nL 232.778392 221.179495 \nL 231.09598 221.647912 \nL 229.413568 222.070099 \nL 227.731156 222.431565 \nL 226.048744 222.735755 \nL 224.366332 222.97923 \nL 222.68392 223.156053 \nL 221.001508 223.280268 \nL 219.319095 223.340218 \nL 217.636683 223.344622 \nL 215.954271 223.293372 \nL 214.271859 223.204118 \nL 212.589447 223.058739 \nL 210.907035 222.843215 \nL 209.224623 222.560508 \nL 207.542211 222.208314 \nL 205.859799 221.766951 \nL 204.177387 221.230273 \nL 202.494975 220.590754 \nL 200.812563 219.855779 \nL 199.130151 219.001827 \nL 197.447739 218.035579 \nL 195.765327 216.921815 \nL 194.082915 215.670487 \nL 192.400503 214.25515 \nL 190.71809 212.675939 \nL 189.035678 210.931036 \nL 187.353266 209.066753 \nL 185.670854 207.073972 \nL 183.988442 204.946163 \nL 182.30603 202.688914 \nL 180.623618 200.328984 \nL 178.941206 197.868667 \nL 177.258794 195.342452 \nL 175.576382 192.762157 \nL 173.89397 190.16819 \nL 172.211558 187.52193 \nL 170.529146 184.851977 \nL 168.846734 182.174936 \nL 167.164322 179.531354 \nL 165.48191 176.943463 \nL 163.799497 174.413003 \nL 162.117085 171.970653 \nL 160.434673 169.613946 \nL 158.752261 167.351565 \nL 157.069849 165.20306 \nL 155.387437 163.218647 \nL 153.705025 161.458045 \nL 152.022613 159.942449 \nL 150.340201 158.716935 \nL 148.657789 157.855396 \nL 146.975377 157.357464 \nL 145.292965 157.21241 \nL 143.610553 157.390124 \nL 141.928141 157.865313 \nL 140.245729 158.524119 \nL 138.563317 159.314719 \nL 136.880905 160.197378 \nL 135.198492 161.141535 \nL 133.51608 162.122843 \nL 131.833668 163.132846 \nL 130.151256 164.174981 \nL 128.468844 165.24432 \nL 126.786432 166.333339 \nL 125.10402 167.429238 \nL 123.421608 168.533532 \nL 121.739196 169.64125 \nL 120.056784 170.747155 \nL 118.374372 171.849316 \nL 116.69196 172.950389 \nL 115.009548 174.046817 \nL 113.327136 175.139033 \nL 111.644724 176.232365 \nL 109.962312 177.328383 \nL 108.279899 178.425043 \nL 106.597487 179.518194 \nL 104.915075 180.61162 \nL 103.232663 181.697623 \nL 101.550251 182.781153 \nL 99.867839 183.857891 \nL 98.185427 184.928106 \nL 96.503015 185.998048 \nL 94.820603 187.065803 \nL 93.138191 188.129742 \nL 91.455779 189.191485 \nL 89.773367 190.251129 \nL 88.090955 191.308024 \nL 86.408543 192.360978 \nL 84.726131 193.408487 \nL 83.043719 194.452298 \nL 81.361307 195.49283 \nL 79.678894 196.53246 \nL 77.996482 197.571588 \nL 76.31407 198.608838 \nL 74.631658 199.640317 \nL 72.949246 200.668786 \nL 71.266834 201.694982 \nL 69.584422 202.720107 \nL 67.90201 203.744546 \nL 66.219598 204.768483 \nL 64.537186 205.790404 \nL 62.854774 206.811501 \nL 61.172362 207.829245 \nL 59.48995 208.846234 \nL 57.807538 209.862461 \nL 56.125126 210.882891 \nL 54.442714 211.901312 \nL 52.760302 212.919306 \nL 51.077889 213.935957 \nL 49.395477 214.951426 \nL 47.713065 215.964903 \nL 46.030653 216.976672 \nL 44.348241 217.988256 \nL 42.665829 218.998307 \nL 40.983417 220.007373 \nL 39.301005 221.016372 \nL 37.618593 222.024913 \nL 35.936181 223.033553 \nL 34.253769 224.041265 \nL 32.571357 225.047467 \nL 30.888945 226.051269 \nL 29.206533 227.053537 \nL 27.524121 228.054471 \nL 25.841709 229.054644 \nL 24.159296 230.054182 \nL 22.476884 231.052643 \nL 20.794472 232.05192 \nL 19.11206 233.050608 \nL 17.429648 234.048434 \nL 15.747236 235.046774 \nL 14.064824 236.045056 \nL 12.382412 237.043218 \nL 10.7 238.041779 \nz\n\" style=\"fill:#ff7f0e;fill-opacity:0.4;stroke:#ff7f0e;stroke-opacity:0.4;\"/>\n   </g>\n   <g id=\"matplotlib.axis_1\">\n    <g id=\"xtick_1\">\n     <g id=\"line2d_1\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 44.18 315.39375 \nL 44.18 43.59375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_2\">\n      <defs>\n       <path d=\"M 0 0 \nL 0 3.5 \n\" id=\"m42271e5db2\" style=\"stroke:#000000;stroke-width:0.8;\"/>\n      </defs>\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"44.18\" xlink:href=\"#m42271e5db2\" y=\"315.39375\"/>\n      </g>\n     </g>\n     <g id=\"text_1\">\n      <!-- −4 -->\n      <defs>\n       <path d=\"M 10.59375 35.5 \nL 73.1875 35.5 \nL 73.1875 27.203125 \nL 10.59375 27.203125 \nz\n\" id=\"DejaVuSans-8722\"/>\n       <path d=\"M 37.796875 64.3125 \nL 12.890625 25.390625 \nL 37.796875 25.390625 \nz\nM 35.203125 72.90625 \nL 47.609375 72.90625 \nL 47.609375 25.390625 \nL 58.015625 25.390625 \nL 58.015625 17.1875 \nL 47.609375 17.1875 \nL 47.609375 0 \nL 37.796875 0 \nL 37.796875 17.1875 \nL 4.890625 17.1875 \nL 4.890625 26.703125 \nz\n\" id=\"DejaVuSans-52\"/>\n      </defs>\n      <g transform=\"translate(22.066719 345.189062)scale(0.3 -0.3)\">\n       <use xlink:href=\"#DejaVuSans-8722\"/>\n       <use x=\"83.789062\" xlink:href=\"#DejaVuSans-52\"/>\n      </g>\n     </g>\n    </g>\n    <g id=\"xtick_2\">\n     <g id=\"line2d_3\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 111.14 315.39375 \nL 111.14 43.59375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_4\">\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"111.14\" xlink:href=\"#m42271e5db2\" y=\"315.39375\"/>\n      </g>\n     </g>\n     <g id=\"text_2\">\n      <!-- −2 -->\n      <defs>\n       <path d=\"M 19.1875 8.296875 \nL 53.609375 8.296875 \nL 53.609375 0 \nL 7.328125 0 \nL 7.328125 8.296875 \nQ 12.9375 14.109375 22.625 23.890625 \nQ 32.328125 33.6875 34.8125 36.53125 \nQ 39.546875 41.84375 41.421875 45.53125 \nQ 43.3125 49.21875 43.3125 52.78125 \nQ 43.3125 58.59375 39.234375 62.25 \nQ 35.15625 65.921875 28.609375 65.921875 \nQ 23.96875 65.921875 18.8125 64.3125 \nQ 13.671875 62.703125 7.8125 59.421875 \nL 7.8125 69.390625 \nQ 13.765625 71.78125 18.9375 73 \nQ 24.125 74.21875 28.421875 74.21875 \nQ 39.75 74.21875 46.484375 68.546875 \nQ 53.21875 62.890625 53.21875 53.421875 \nQ 53.21875 48.921875 51.53125 44.890625 \nQ 49.859375 40.875 45.40625 35.40625 \nQ 44.1875 33.984375 37.640625 27.21875 \nQ 31.109375 20.453125 19.1875 8.296875 \nz\n\" id=\"DejaVuSans-50\"/>\n      </defs>\n      <g transform=\"translate(89.026719 345.189062)scale(0.3 -0.3)\">\n       <use xlink:href=\"#DejaVuSans-8722\"/>\n       <use x=\"83.789062\" xlink:href=\"#DejaVuSans-50\"/>\n      </g>\n     </g>\n    </g>\n    <g id=\"xtick_3\">\n     <g id=\"line2d_5\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 178.1 315.39375 \nL 178.1 43.59375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_6\">\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"178.1\" xlink:href=\"#m42271e5db2\" y=\"315.39375\"/>\n      </g>\n     </g>\n     <g id=\"text_3\">\n      <!-- 0 -->\n      <defs>\n       <path d=\"M 31.78125 66.40625 \nQ 24.171875 66.40625 20.328125 58.90625 \nQ 16.5 51.421875 16.5 36.375 \nQ 16.5 21.390625 20.328125 13.890625 \nQ 24.171875 6.390625 31.78125 6.390625 \nQ 39.453125 6.390625 43.28125 13.890625 \nQ 47.125 21.390625 47.125 36.375 \nQ 47.125 51.421875 43.28125 58.90625 \nQ 39.453125 66.40625 31.78125 66.40625 \nz\nM 31.78125 74.21875 \nQ 44.046875 74.21875 50.515625 64.515625 \nQ 56.984375 54.828125 56.984375 36.375 \nQ 56.984375 17.96875 50.515625 8.265625 \nQ 44.046875 -1.421875 31.78125 -1.421875 \nQ 19.53125 -1.421875 13.0625 8.265625 \nQ 6.59375 17.96875 6.59375 36.375 \nQ 6.59375 54.828125 13.0625 64.515625 \nQ 19.53125 74.21875 31.78125 74.21875 \nz\n\" id=\"DejaVuSans-48\"/>\n      </defs>\n      <g transform=\"translate(168.55625 345.189062)scale(0.3 -0.3)\">\n       <use xlink:href=\"#DejaVuSans-48\"/>\n      </g>\n     </g>\n    </g>\n    <g id=\"xtick_4\">\n     <g id=\"line2d_7\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 245.06 315.39375 \nL 245.06 43.59375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_8\">\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"245.06\" xlink:href=\"#m42271e5db2\" y=\"315.39375\"/>\n      </g>\n     </g>\n     <g id=\"text_4\">\n      <!-- 2 -->\n      <g transform=\"translate(235.51625 345.189062)scale(0.3 -0.3)\">\n       <use xlink:href=\"#DejaVuSans-50\"/>\n      </g>\n     </g>\n    </g>\n    <g id=\"xtick_5\">\n     <g id=\"line2d_9\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 312.02 315.39375 \nL 312.02 43.59375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_10\">\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"312.02\" xlink:href=\"#m42271e5db2\" y=\"315.39375\"/>\n      </g>\n     </g>\n     <g id=\"text_5\">\n      <!-- 4 -->\n      <g transform=\"translate(302.47625 345.189062)scale(0.3 -0.3)\">\n       <use xlink:href=\"#DejaVuSans-52\"/>\n      </g>\n     </g>\n    </g>\n    <g id=\"text_6\">\n     <!-- $x$ -->\n     <defs>\n      <path d=\"M 60.015625 54.6875 \nL 34.90625 27.875 \nL 50.296875 0 \nL 39.984375 0 \nL 28.421875 21.6875 \nL 8.296875 0 \nL -2.59375 0 \nL 24.3125 28.8125 \nL 10.015625 54.6875 \nL 20.3125 54.6875 \nL 30.8125 34.90625 \nL 49.125 54.6875 \nz\n\" id=\"DejaVuSans-Oblique-120\"/>\n     </defs>\n     <g transform=\"translate(169.1 378.223437)scale(0.3 -0.3)\">\n      <use transform=\"translate(0 0.3125)\" xlink:href=\"#DejaVuSans-Oblique-120\"/>\n     </g>\n    </g>\n   </g>\n   <g id=\"matplotlib.axis_2\">\n    <g id=\"ytick_1\">\n     <g id=\"line2d_11\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 10.7 292.74375 \nL 345.5 292.74375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_12\">\n      <defs>\n       <path d=\"M 0 0 \nL -3.5 0 \n\" id=\"m87cecb9e78\" style=\"stroke:#000000;stroke-width:0.8;\"/>\n      </defs>\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"10.7\" xlink:href=\"#m87cecb9e78\" y=\"292.74375\"/>\n      </g>\n     </g>\n    </g>\n    <g id=\"ytick_2\">\n     <g id=\"line2d_13\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 10.7 247.44375 \nL 345.5 247.44375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_14\">\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"10.7\" xlink:href=\"#m87cecb9e78\" y=\"247.44375\"/>\n      </g>\n     </g>\n    </g>\n    <g id=\"ytick_3\">\n     <g id=\"line2d_15\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 10.7 202.14375 \nL 345.5 202.14375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_16\">\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"10.7\" xlink:href=\"#m87cecb9e78\" y=\"202.14375\"/>\n      </g>\n     </g>\n    </g>\n    <g id=\"ytick_4\">\n     <g id=\"line2d_17\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 10.7 156.84375 \nL 345.5 156.84375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_18\">\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"10.7\" xlink:href=\"#m87cecb9e78\" y=\"156.84375\"/>\n      </g>\n     </g>\n    </g>\n    <g id=\"ytick_5\">\n     <g id=\"line2d_19\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 10.7 111.54375 \nL 345.5 111.54375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_20\">\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"10.7\" xlink:href=\"#m87cecb9e78\" y=\"111.54375\"/>\n      </g>\n     </g>\n    </g>\n    <g id=\"ytick_6\">\n     <g id=\"line2d_21\">\n      <path clip-path=\"url(#p54966e6dbe)\" d=\"M 10.7 66.24375 \nL 345.5 66.24375 \n\" style=\"fill:none;stroke:#b0b0b0;stroke-linecap:square;stroke-opacity:0.3;stroke-width:0.8;\"/>\n     </g>\n     <g id=\"line2d_22\">\n      <g>\n       <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"10.7\" xlink:href=\"#m87cecb9e78\" y=\"66.24375\"/>\n      </g>\n     </g>\n    </g>\n   </g>\n   <g id=\"line2d_23\">\n    <path clip-path=\"url(#p54966e6dbe)\" d=\"M 10.7 244.239199 \nL 93.138191 194.490319 \nL 130.151256 171.914999 \nL 135.198492 169.190842 \nL 138.563317 167.609739 \nL 140.245729 166.95865 \nL 141.928141 166.447742 \nL 143.610553 166.133427 \nL 145.292965 166.137259 \nL 146.975377 166.473768 \nL 148.657789 167.181735 \nL 150.340201 168.282348 \nL 152.022613 169.785375 \nL 153.705025 171.608471 \nL 155.387437 173.704875 \nL 158.752261 178.607151 \nL 162.117085 184.040624 \nL 165.48191 189.853055 \nL 170.529146 199.054361 \nL 177.258794 211.36968 \nL 180.623618 217.260546 \nL 183.988442 222.815 \nL 187.353266 227.907872 \nL 190.71809 232.470798 \nL 194.082915 236.4271 \nL 197.447739 239.765924 \nL 200.812563 242.607237 \nL 204.177387 245.085542 \nL 207.542211 247.31113 \nL 212.589447 250.290585 \nL 219.319095 253.934962 \nL 226.048744 257.35582 \nL 239.50804 263.925599 \nL 254.649749 271.053135 \nL 313.534171 298.634537 \nL 345.5 313.473089 \nL 345.5 313.473089 \n\" style=\"fill:none;stroke:#000000;stroke-linecap:square;\"/>\n   </g>\n   <g id=\"patch_3\">\n    <path d=\"M 10.7 315.39375 \nL 10.7 43.59375 \n\" style=\"fill:none;stroke:#000000;stroke-linecap:square;stroke-linejoin:miter;stroke-width:0.8;\"/>\n   </g>\n   <g id=\"patch_4\">\n    <path d=\"M 345.5 315.39375 \nL 345.5 43.59375 \n\" style=\"fill:none;stroke:#000000;stroke-linecap:square;stroke-linejoin:miter;stroke-width:0.8;\"/>\n   </g>\n   <g id=\"patch_5\">\n    <path d=\"M 10.7 315.39375 \nL 345.5 315.39375 \n\" style=\"fill:none;stroke:#000000;stroke-linecap:square;stroke-linejoin:miter;stroke-width:0.8;\"/>\n   </g>\n   <g id=\"patch_6\">\n    <path d=\"M 10.7 43.59375 \nL 345.5 43.59375 \n\" style=\"fill:none;stroke:#000000;stroke-linecap:square;stroke-linejoin:miter;stroke-width:0.8;\"/>\n   </g>\n   <g id=\"text_7\">\n    <!-- SGLD -->\n    <defs>\n     <path d=\"M 53.515625 70.515625 \nL 53.515625 60.890625 \nQ 47.90625 63.578125 42.921875 64.890625 \nQ 37.9375 66.21875 33.296875 66.21875 \nQ 25.25 66.21875 20.875 63.09375 \nQ 16.5 59.96875 16.5 54.203125 \nQ 16.5 49.359375 19.40625 46.890625 \nQ 22.3125 44.4375 30.421875 42.921875 \nL 36.375 41.703125 \nQ 47.40625 39.59375 52.65625 34.296875 \nQ 57.90625 29 57.90625 20.125 \nQ 57.90625 9.515625 50.796875 4.046875 \nQ 43.703125 -1.421875 29.984375 -1.421875 \nQ 24.8125 -1.421875 18.96875 -0.25 \nQ 13.140625 0.921875 6.890625 3.21875 \nL 6.890625 13.375 \nQ 12.890625 10.015625 18.65625 8.296875 \nQ 24.421875 6.59375 29.984375 6.59375 \nQ 38.421875 6.59375 43.015625 9.90625 \nQ 47.609375 13.234375 47.609375 19.390625 \nQ 47.609375 24.75 44.3125 27.78125 \nQ 41.015625 30.8125 33.5 32.328125 \nL 27.484375 33.5 \nQ 16.453125 35.6875 11.515625 40.375 \nQ 6.59375 45.0625 6.59375 53.421875 \nQ 6.59375 63.09375 13.40625 68.65625 \nQ 20.21875 74.21875 32.171875 74.21875 \nQ 37.3125 74.21875 42.625 73.28125 \nQ 47.953125 72.359375 53.515625 70.515625 \nz\n\" id=\"DejaVuSans-83\"/>\n     <path d=\"M 59.515625 10.40625 \nL 59.515625 29.984375 \nL 43.40625 29.984375 \nL 43.40625 38.09375 \nL 69.28125 38.09375 \nL 69.28125 6.78125 \nQ 63.578125 2.734375 56.6875 0.65625 \nQ 49.8125 -1.421875 42 -1.421875 \nQ 24.90625 -1.421875 15.25 8.5625 \nQ 5.609375 18.5625 5.609375 36.375 \nQ 5.609375 54.25 15.25 64.234375 \nQ 24.90625 74.21875 42 74.21875 \nQ 49.125 74.21875 55.546875 72.453125 \nQ 61.96875 70.703125 67.390625 67.28125 \nL 67.390625 56.78125 \nQ 61.921875 61.421875 55.765625 63.765625 \nQ 49.609375 66.109375 42.828125 66.109375 \nQ 29.4375 66.109375 22.71875 58.640625 \nQ 16.015625 51.171875 16.015625 36.375 \nQ 16.015625 21.625 22.71875 14.15625 \nQ 29.4375 6.6875 42.828125 6.6875 \nQ 48.046875 6.6875 52.140625 7.59375 \nQ 56.25 8.5 59.515625 10.40625 \nz\n\" id=\"DejaVuSans-71\"/>\n     <path d=\"M 9.8125 72.90625 \nL 19.671875 72.90625 \nL 19.671875 8.296875 \nL 55.171875 8.296875 \nL 55.171875 0 \nL 9.8125 0 \nz\n\" id=\"DejaVuSans-76\"/>\n     <path d=\"M 19.671875 64.796875 \nL 19.671875 8.109375 \nL 31.59375 8.109375 \nQ 46.6875 8.109375 53.6875 14.9375 \nQ 60.6875 21.78125 60.6875 36.53125 \nQ 60.6875 51.171875 53.6875 57.984375 \nQ 46.6875 64.796875 31.59375 64.796875 \nz\nM 9.8125 72.90625 \nL 30.078125 72.90625 \nQ 51.265625 72.90625 61.171875 64.09375 \nQ 71.09375 55.28125 71.09375 36.53125 \nQ 71.09375 17.671875 61.125 8.828125 \nQ 51.171875 0 30.078125 0 \nL 9.8125 0 \nz\n\" id=\"DejaVuSans-68\"/>\n    </defs>\n    <g transform=\"translate(123.3625 37.59375)scale(0.4 -0.4)\">\n     <use xlink:href=\"#DejaVuSans-83\"/>\n     <use x=\"63.476562\" xlink:href=\"#DejaVuSans-71\"/>\n     <use x=\"140.966797\" xlink:href=\"#DejaVuSans-76\"/>\n     <use x=\"196.679688\" xlink:href=\"#DejaVuSans-68\"/>\n    </g>\n   </g>\n  </g>\n </g>\n <defs>\n  <clipPath id=\"p54966e6dbe\">\n   <rect height=\"271.8\" width=\"334.8\" x=\"10.7\" y=\"43.59375\"/>\n  </clipPath>\n </defs>\n</svg>\n"},"metadata":{"tags":[]}}]},{"cell_type":"markdown","metadata":{"id":"QBBlvk1JylkX","colab_type":"text"},"source":["# UCI dataset"]},{"cell_type":"code","metadata":{"id":"xqs5AazDj93D","colab_type":"code","colab":{}},"source":["class Langevin_Model_UCI(nn.Module):\n","    def __init__(self, input_dim, output_dim, num_units):\n","        super(Langevin_Model_UCI, self).__init__()\n","        \n","        self.input_dim = input_dim\n","        self.output_dim = output_dim\n","        \n","        # network with two hidden and one output layer\n","        self.layer1 = Langevin_Layer(input_dim, num_units)\n","        self.layer2 = Langevin_Layer(num_units, num_units)\n","        self.layer3 = Langevin_Layer(num_units, 2*output_dim)\n","        \n","        # activation to be used between hidden layers\n","        self.activation = nn.ReLU(inplace = True)\n","    \n","    def forward(self, x):\n","        \n","        x = x.view(-1, self.input_dim)\n","        \n","        x = self.layer1(x)\n","        x = self.activation(x)\n","        \n","        x = self.layer2(x)\n","        x = self.activation(x)\n","        \n","        x = self.layer3(x)\n","        \n","        return x\n","\n","\n","def train_mc_dropout(data, n_splits, burn_in, mix_time, num_nets, num_units, learn_rate, weight_decay, log_every):\n","    \n","    kf = KFold(n_splits=n_splits)\n","    in_dim = data.shape[1] - 1\n","    train_logliks, test_logliks = [], []\n","    train_rmses, test_rmses = [], []\n","\n","    for j, idx in enumerate(kf.split(data)):\n","        print('FOLD %d:' % j)\n","        train_index, test_index = idx\n","\n","        x_train, y_train = data[train_index, :in_dim], data[train_index, in_dim:]\n","        x_test, y_test = data[test_index, :in_dim], data[test_index, in_dim:]\n","\n","        x_means, x_stds = x_train.mean(axis = 0), x_train.var(axis = 0)**0.5\n","        y_means, y_stds = y_train.mean(axis = 0), y_train.var(axis = 0)**0.5\n","\n","        x_train = (x_train - x_means)/x_stds\n","        y_train = (y_train - y_means)/y_stds\n","\n","        x_test = (x_test - x_means)/x_stds\n","        y_test = (y_test - y_means)/y_stds\n","\n","        net = Langevin_Wrapper(network=Langevin_Model_UCI(input_dim=in_dim, output_dim=1, num_units=num_units),\n","                               learn_rate=learn_rate, batch_size=batch_size, no_batches=1, weight_decay=weight_decay)\n","\n","        nets, losses = [], []\n","        num_epochs = burn_in + mix_time*num_nets + 1\n","        fit_loss_train = np.zeros(num_epochs)\n","\n","        for i in range(num_epochs):\n","            loss = net.fit(x_train, y_train)\n","\n","            if i % mix_time == 0 and i > burn_in:\n","                nets.append(copy.deepcopy(net.network))\n","                \n","            if i % log_every == 0 or i == num_epochs - 1:\n","                test_loss = net.test_loss(x_test, y_test).cpu().data.numpy()\n","\n","                if len(nets) > 10: ensemble_loss, rmse = eval_ensemble(x_test, y_test, nets)\n","                else: ensemble_loss, rmse = float('nan'), float('nan')\n","\n","                print('Epoch: %4d, Train loss: %6.3f Test loss: %6.3f Ensemble loss: %6.3f RMSE: %.3f Num. networks: %2d' %\n","                      (i, loss.cpu().data.numpy()/len(x_train), test_loss/len(x_test), ensemble_loss, rmse*y_stds[0], len(nets)))\n","\n","\n","        train_loss, train_rmse = eval_ensemble(x_train, y_train, nets)\n","        test_loss, test_rmse = eval_ensemble(x_test, y_test, nets)\n","\n","        train_logliks.append(-(train_loss.cpu().data.numpy() + np.log(y_stds)[0]))\n","        test_logliks.append(-(test_loss.cpu().data.numpy() + np.log(y_stds)[0]))\n","\n","        train_rmses.append(y_stds[0]*train_rmse.cpu().data.numpy())\n","        test_rmses.append(y_stds[0]*test_rmse.cpu().data.numpy())\n","\n","\n","    print('Train log. lik. = %6.3f +/- %6.3f' % (np.array(train_logliks).mean(), np.array(train_logliks).var()**0.5))\n","    print('Test  log. lik. = %6.3f +/- %6.3f' % (np.array(test_logliks).mean(), np.array(test_logliks).var()**0.5))\n","    print('Train RMSE      = %6.3f +/- %6.3f' % (np.array(train_rmses).mean(), np.array(train_rmses).var()**0.5))\n","    print('Test  RMSE      = %6.3f +/- %6.3f' % (np.array(test_rmses).mean(), np.array(test_rmses).var()**0.5))\n","    \n","    return nets"],"execution_count":0,"outputs":[]},{"cell_type":"markdown","metadata":{"id":"HXpA9VBGMsxc","colab_type":"text"},"source":["# Housing dataset"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"cOq4tM_PLRUV","outputId":"69100820-ab92-450b-bf1c-152e53e0a6d8","executionInfo":{"status":"ok","timestamp":1558119832104,"user_tz":-60,"elapsed":2899,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}},"colab":{"base_uri":"https://localhost:8080/","height":204}},"source":["np.random.seed(0)\n","!wget \"https://archive.ics.uci.edu/ml/machine-learning-databases/housing/housing.data\" --no-check-certificate \n","data = pd.read_csv('housing.data', header=0, delimiter=\"\\s+\").values\n","data = data[np.random.permutation(np.arange(len(data)))]"],"execution_count":81,"outputs":[{"output_type":"stream","text":["--2019-05-17 19:03:50--  https://archive.ics.uci.edu/ml/machine-learning-databases/housing/housing.data\n","Resolving archive.ics.uci.edu (archive.ics.uci.edu)... 128.195.10.252\n","Connecting to archive.ics.uci.edu (archive.ics.uci.edu)|128.195.10.252|:443... connected.\n","HTTP request sent, awaiting response... 200 OK\n","Length: 49082 (48K) [application/x-httpd-php]\n","Saving to: ‘housing.data.11’\n","\n","housing.data.11     100%[===================>]  47.93K   170KB/s    in 0.3s    \n","\n","2019-05-17 19:03:50 (170 KB/s) - ‘housing.data.11’ saved [49082/49082]\n","\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"zw2MfGuWKU6G","colab_type":"code","outputId":"448a9a94-cac4-44d1-a0c1-0cf8dd119cc5","executionInfo":{"status":"ok","timestamp":1558119911822,"user_tz":-60,"elapsed":81769,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}},"colab":{"base_uri":"https://localhost:8080/","height":2125}},"source":["ensemble = train_mc_dropout(data=data, n_splits=10, burn_in=3000, mix_time=100, num_nets=20,\n","                            num_units=100, learn_rate=1e-2/len(data), weight_decay=1, log_every=500)"],"execution_count":82,"outputs":[{"output_type":"stream","text":["FOLD 0:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.564 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.276 Test loss: -0.331 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.766 Test loss: -0.717 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.790 Test loss: -0.305 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.770 Test loss: -0.786 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.045 Test loss: -0.345 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.632 Test loss: -0.590 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.787 Test loss: -0.366 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.530 Test loss: -0.286 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.546 Test loss: -0.273 Ensemble loss: -0.625 RMSE: 2.828 Num. networks: 15\n","Epoch: 5000, Train loss: -0.658 Test loss: -0.389 Ensemble loss: -0.632 RMSE: 2.647 Num. networks: 20\n","FOLD 1:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.499 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.368 Test loss: -0.311 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.829 Test loss: -0.470 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.863 Test loss: -0.790 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.824 Test loss: -0.616 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.819 Test loss: -0.322 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.888 Test loss: -0.112 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.734 Test loss: -0.487 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.644 Test loss: -0.046 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.905 Test loss: -0.536 Ensemble loss: -0.670 RMSE: 3.439 Num. networks: 15\n","Epoch: 5000, Train loss: -0.769 Test loss:  0.941 Ensemble loss: -0.681 RMSE: 3.297 Num. networks: 20\n","FOLD 2:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.344 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.450 Test loss: -0.314 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.879 Test loss: -0.567 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.928 Test loss:  0.065 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.804 Test loss: -0.378 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.642 Test loss:  0.276 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.919 Test loss:  0.841 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.918 Test loss:  0.044 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.082 Test loss:  0.624 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.878 Test loss:  0.319 Ensemble loss: -0.525 RMSE: 4.367 Num. networks: 15\n","Epoch: 5000, Train loss: -0.773 Test loss: -0.303 Ensemble loss: -0.574 RMSE: 4.237 Num. networks: 20\n","FOLD 3:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.553 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.400 Test loss: -0.175 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.770 Test loss: -0.317 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.537 Test loss:  0.205 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.814 Test loss: -0.381 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.782 Test loss: -0.067 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.846 Test loss:  0.198 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.889 Test loss: -0.417 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.927 Test loss: -0.251 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.758 Test loss: -0.362 Ensemble loss: -0.624 RMSE: 4.337 Num. networks: 15\n","Epoch: 5000, Train loss: -0.840 Test loss:  0.127 Ensemble loss: -0.636 RMSE: 4.370 Num. networks: 20\n","FOLD 4:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.497 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.430 Test loss: -0.359 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.790 Test loss: -0.301 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.757 Test loss: -0.722 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.787 Test loss:  0.113 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.742 Test loss: -0.411 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.761 Test loss:  0.122 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.660 Test loss:  5.950 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.759 Test loss:  0.137 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.620 Test loss: 11.649 Ensemble loss: -0.542 RMSE: 5.571 Num. networks: 15\n","Epoch: 5000, Train loss: -0.924 Test loss:  4.491 Ensemble loss: -0.577 RMSE: 5.409 Num. networks: 20\n","FOLD 5:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.288 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.371 Test loss: -0.558 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.702 Test loss: -0.569 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.845 Test loss: -0.516 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.814 Test loss: -0.469 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.784 Test loss: -0.664 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.326 Test loss: -0.208 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.368 Test loss: -0.546 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.863 Test loss: -0.561 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.821 Test loss: -0.484 Ensemble loss: -0.719 RMSE: 2.558 Num. networks: 15\n","Epoch: 5000, Train loss: -0.883 Test loss: -0.363 Ensemble loss: -0.733 RMSE: 2.510 Num. networks: 20\n","FOLD 6:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.377 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.144 Test loss: -0.315 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.782 Test loss: -0.539 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.457 Test loss: -0.446 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.054 Test loss:  0.582 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.645 Test loss: -0.524 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.196 Test loss:  1.421 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.507 Test loss: -0.121 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.675 Test loss: -0.376 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.301 Test loss: -0.083 Ensemble loss: -0.629 RMSE: 3.117 Num. networks: 15\n","Epoch: 5000, Train loss: -0.847 Test loss:  0.029 Ensemble loss: -0.616 RMSE: 3.052 Num. networks: 20\n","FOLD 7:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.875 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.338 Test loss: -0.112 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.803 Test loss: -0.293 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.699 Test loss: -0.344 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.397 Test loss: -0.391 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.858 Test loss: -0.393 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.869 Test loss:  0.018 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.724 Test loss:  0.160 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.916 Test loss: -0.075 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.541 Test loss:  0.136 Ensemble loss: -0.629 RMSE: 4.729 Num. networks: 15\n","Epoch: 5000, Train loss: -0.934 Test loss:  0.486 Ensemble loss: -0.647 RMSE: 4.542 Num. networks: 20\n","FOLD 8:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.701 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.400 Test loss: -0.117 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.811 Test loss: -0.134 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.880 Test loss: -0.113 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.769 Test loss: -0.020 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.400 Test loss:  0.015 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.782 Test loss: -0.372 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.408 Test loss: -0.488 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.140 Test loss: -0.533 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.796 Test loss:  2.168 Ensemble loss: -0.661 RMSE: 4.564 Num. networks: 15\n","Epoch: 5000, Train loss: -0.580 Test loss: -0.297 Ensemble loss: -0.614 RMSE: 4.565 Num. networks: 20\n","FOLD 9:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.390 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.176 Test loss: -0.236 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.804 Test loss: -0.429 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.439 Test loss: -0.322 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.401 Test loss: -0.637 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.717 Test loss: -0.476 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.871 Test loss: -0.442 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.745 Test loss:  0.158 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.304 Test loss:  1.274 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.866 Test loss:  0.281 Ensemble loss: -0.458 RMSE: 3.158 Num. networks: 15\n","Epoch: 5000, Train loss: -0.602 Test loss: -0.289 Ensemble loss: -0.469 RMSE: 3.049 Num. networks: 20\n","Train log. lik. = -1.399 +/-  0.089\n","Test  log. lik. = -1.601 +/-  0.070\n","Train RMSE      =  3.025 +/-  0.202\n","Test  RMSE      =  3.768 +/-  0.927\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"colab_type":"text","id":"vnGD_RefRxkZ"},"source":["# Concrete compressive dataset"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"s5G3-z_jRxkZ","colab":{"base_uri":"https://localhost:8080/","height":204},"outputId":"46238cb3-4c5e-4245-d22a-8238f3d95f4d","executionInfo":{"status":"ok","timestamp":1558119914525,"user_tz":-60,"elapsed":70012,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}}},"source":["np.random.seed(0)\n","!wget \"https://archive.ics.uci.edu/ml/machine-learning-databases/concrete/compressive/Concrete_Data.xls\" --no-check-certificate\n","data = pd.read_excel('Concrete_Data.xls', header=0, delimiter=\"\\s+\").values\n","data = data[np.random.permutation(np.arange(len(data)))]"],"execution_count":83,"outputs":[{"output_type":"stream","text":["--2019-05-17 19:05:12--  https://archive.ics.uci.edu/ml/machine-learning-databases/concrete/compressive/Concrete_Data.xls\n","Resolving archive.ics.uci.edu (archive.ics.uci.edu)... 128.195.10.252\n","Connecting to archive.ics.uci.edu (archive.ics.uci.edu)|128.195.10.252|:443... connected.\n","HTTP request sent, awaiting response... 200 OK\n","Length: 124928 (122K) [application/x-httpd-php]\n","Saving to: ‘Concrete_Data.xls.3’\n","\n","Concrete_Data.xls.3 100%[===================>] 122.00K   290KB/s    in 0.4s    \n","\n","2019-05-17 19:05:13 (290 KB/s) - ‘Concrete_Data.xls.3’ saved [124928/124928]\n","\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"W2Y408i4LW97","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":2125},"outputId":"444f5d67-9d38-400f-a223-ea3540387b65","executionInfo":{"status":"ok","timestamp":1558119995285,"user_tz":-60,"elapsed":150615,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}}},"source":["ensemble = train_mc_dropout(data=data, n_splits=10, burn_in=3000, mix_time=100, num_nets=20,\n","                            num_units=200, learn_rate=1e-1/len(data), weight_decay=100, log_every=500)"],"execution_count":84,"outputs":[{"output_type":"stream","text":["FOLD 0:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.489 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.022 Test loss:  0.103 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.390 Test loss: -0.371 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.317 Test loss: -0.043 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.407 Test loss: -0.327 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.380 Test loss: -0.333 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.322 Test loss: -0.205 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.345 Test loss: -0.295 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.183 Test loss: -0.205 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.342 Test loss: -0.347 Ensemble loss: -0.278 RMSE: 7.568 Num. networks: 15\n","Epoch: 5000, Train loss: -0.028 Test loss:  0.422 Ensemble loss: -0.264 RMSE: 7.548 Num. networks: 20\n","FOLD 1:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.460 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.242 Test loss: -0.380 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.422 Test loss: -0.518 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.286 Test loss:  0.356 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.193 Test loss: -0.331 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.370 Test loss: -0.449 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.445 Test loss: -0.446 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.396 Test loss: -0.412 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.386 Test loss: -0.488 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.196 Test loss:  1.401 Ensemble loss: -0.363 RMSE: 6.484 Num. networks: 15\n","Epoch: 5000, Train loss: -0.437 Test loss:  0.029 Ensemble loss: -0.307 RMSE: 6.347 Num. networks: 20\n","FOLD 2:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.428 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.027 Test loss:  0.179 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.456 Test loss: -0.188 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.465 Test loss: -0.028 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.421 Test loss: -0.270 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.395 Test loss: -0.362 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.305 Test loss: -0.333 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.051 Test loss:  0.250 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.314 Test loss: -0.282 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.181 Test loss: -0.280 Ensemble loss: -0.245 RMSE: 6.998 Num. networks: 15\n","Epoch: 5000, Train loss:  0.770 Test loss:  0.112 Ensemble loss: -0.254 RMSE: 7.003 Num. networks: 20\n","FOLD 3:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.463 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.313 Test loss: -0.334 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.052 Test loss:  0.426 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.057 Test loss: -0.193 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.382 Test loss: -0.210 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.420 Test loss: -0.344 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.426 Test loss: -0.262 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.377 Test loss: -0.317 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.284 Test loss: -0.129 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.407 Test loss: -0.297 Ensemble loss: -0.303 RMSE: 7.088 Num. networks: 15\n","Epoch: 5000, Train loss: -0.458 Test loss: -0.476 Ensemble loss: -0.303 RMSE: 7.088 Num. networks: 20\n","FOLD 4:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.554 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.389 Test loss: -0.351 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.410 Test loss: -0.419 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.642 Test loss:  0.709 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.138 Test loss: -0.259 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.382 Test loss: -0.383 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.279 Test loss: -0.332 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.382 Test loss: -0.376 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.285 Test loss: -0.205 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.420 Test loss: -0.295 Ensemble loss: -0.188 RMSE: 7.763 Num. networks: 15\n","Epoch: 5000, Train loss: -0.114 Test loss: -0.224 Ensemble loss: -0.224 RMSE: 7.438 Num. networks: 20\n","FOLD 5:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.546 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.365 Test loss: -0.256 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.435 Test loss: -0.290 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.370 Test loss: -0.266 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.421 Test loss: -0.276 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.423 Test loss: -0.206 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.387 Test loss: -0.053 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.214 Test loss: -0.273 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.045 Test loss: -0.159 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.048 Test loss: -0.141 Ensemble loss: -0.229 RMSE: 8.332 Num. networks: 15\n","Epoch: 5000, Train loss: -0.161 Test loss:  0.159 Ensemble loss: -0.252 RMSE: 8.204 Num. networks: 20\n","FOLD 6:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.527 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.159 Test loss: -0.222 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.397 Test loss: -0.352 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.190 Test loss: -0.133 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.334 Test loss: -0.333 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.437 Test loss: -0.529 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.029 Test loss: -0.288 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.380 Test loss: -0.413 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.198 Test loss:  0.036 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.252 Test loss: -0.297 Ensemble loss: -0.247 RMSE: 7.084 Num. networks: 15\n","Epoch: 5000, Train loss: -0.334 Test loss: -0.320 Ensemble loss: -0.272 RMSE: 7.182 Num. networks: 20\n","FOLD 7:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.479 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.440 Test loss: -0.419 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.206 Test loss:  0.299 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.444 Test loss: -0.361 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.427 Test loss: -0.287 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.356 Test loss: -0.100 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.394 Test loss: -0.295 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.325 Test loss: -0.335 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.412 Test loss: -0.336 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.181 Test loss: -0.199 Ensemble loss: -0.292 RMSE: 8.200 Num. networks: 15\n","Epoch: 5000, Train loss: -0.373 Test loss:  0.230 Ensemble loss: -0.245 RMSE: 8.061 Num. networks: 20\n","FOLD 8:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.482 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.019 Test loss: -0.026 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.266 Test loss:  0.011 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.336 Test loss:  0.166 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.906 Test loss:  0.536 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.175 Test loss: -0.327 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.447 Test loss: -0.447 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.329 Test loss: -0.397 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.545 Test loss:  0.080 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.430 Test loss: -0.482 Ensemble loss: -0.202 RMSE: 7.103 Num. networks: 15\n","Epoch: 5000, Train loss: -0.190 Test loss: -0.274 Ensemble loss: -0.244 RMSE: 7.038 Num. networks: 20\n","FOLD 9:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.587 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.398 Test loss: -0.365 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  1.301 Test loss:  0.409 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.733 Test loss:  0.034 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.374 Test loss: -0.003 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.278 Test loss: -0.253 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.451 Test loss: -0.320 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.409 Test loss: -0.382 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.410 Test loss: -0.119 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.048 Test loss:  0.602 Ensemble loss: -0.075 RMSE: 8.279 Num. networks: 15\n","Epoch: 5000, Train loss:  0.095 Test loss: -0.045 Ensemble loss: -0.103 RMSE: 8.257 Num. networks: 20\n","Train log. lik. = -2.546 +/-  0.042\n","Test  log. lik. = -2.568 +/-  0.051\n","Train RMSE      =  7.104 +/-  0.111\n","Test  RMSE      =  7.417 +/-  0.581\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"colab_type":"text","id":"uxatwTIqVrZ1"},"source":["# Energy efficiency dataset"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"pZDQX5l3ZfLV","colab":{"base_uri":"https://localhost:8080/","height":221},"outputId":"21f38fd3-068e-4226-a185-167272083481","executionInfo":{"status":"ok","timestamp":1558131824046,"user_tz":-60,"elapsed":3646,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}}},"source":["np.random.seed(0)\n","!wget \"http://archive.ics.uci.edu/ml/machine-learning-databases/00242/ENB2012_data.xlsx\" --no-check-certificate\n","data = pd.read_excel('ENB2012_data.xlsx', header=0, delimiter=\"\\s+\").values\n","data = data[np.random.permutation(np.arange(len(data)))]\n","data.shape"],"execution_count":15,"outputs":[{"output_type":"stream","text":["--2019-05-17 22:23:41--  http://archive.ics.uci.edu/ml/machine-learning-databases/00242/ENB2012_data.xlsx\n","Resolving archive.ics.uci.edu (archive.ics.uci.edu)... 128.195.10.252\n","Connecting to archive.ics.uci.edu (archive.ics.uci.edu)|128.195.10.252|:80... connected.\n","HTTP request sent, awaiting response... 200 OK\n","Length: 76189 (74K) [application/x-httpd-php]\n","Saving to: ‘ENB2012_data.xlsx.2’\n","\n","ENB2012_data.xlsx.2 100%[===================>]  74.40K   233KB/s    in 0.3s    \n","\n","2019-05-17 22:23:42 (233 KB/s) - ‘ENB2012_data.xlsx.2’ saved [76189/76189]\n","\n"],"name":"stdout"},{"output_type":"execute_result","data":{"text/plain":["(768, 10)"]},"metadata":{"tags":[]},"execution_count":15}]},{"cell_type":"code","metadata":{"colab_type":"code","id":"ajazc6zeZfLY","colab":{"base_uri":"https://localhost:8080/","height":2125},"outputId":"69dbdc8c-755f-4198-eb14-3c0ff41a4800","executionInfo":{"status":"ok","timestamp":1558131898776,"user_tz":-60,"elapsed":77955,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}}},"source":["ensemble = train_mc_dropout(data=data, n_splits=10, burn_in=3000, mix_time=100, num_nets=20,\n","                            num_units=200, learn_rate=1e-1/len(data), weight_decay=100, log_every=500)"],"execution_count":16,"outputs":[{"output_type":"stream","text":["FOLD 0:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.549 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.005 Test loss: -0.321 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.867 Test loss: -0.902 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.846 Test loss:  0.741 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  1.190 Test loss: -0.036 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.085 Test loss:  1.299 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.531 Test loss: -0.558 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  2.104 Test loss:  0.251 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.547 Test loss: -0.096 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.263 Test loss: -0.308 Ensemble loss: -0.162 RMSE: 3.640 Num. networks: 15\n","Epoch: 5000, Train loss: -0.307 Test loss: -0.458 Ensemble loss: -0.131 RMSE: 3.504 Num. networks: 20\n","FOLD 1:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.498 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.274 Test loss:  0.283 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.333 Test loss: -0.379 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.204 Test loss:  0.355 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.393 Test loss:  0.721 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.867 Test loss: -0.301 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.853 Test loss: -0.865 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.346 Test loss: -0.409 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.970 Test loss: -0.747 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.570 Test loss: -0.469 Ensemble loss: -0.059 RMSE: 3.951 Num. networks: 15\n","Epoch: 5000, Train loss:  0.271 Test loss:  0.202 Ensemble loss: -0.095 RMSE: 3.922 Num. networks: 20\n","FOLD 2:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.456 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.151 Test loss: -0.334 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.613 Test loss: -0.775 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.948 Test loss: -0.988 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.479 Test loss: -0.710 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.292 Test loss:  0.264 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.637 Test loss:  0.500 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.105 Test loss: -0.082 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.444 Test loss: -0.658 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.088 Test loss: -0.230 Ensemble loss: -0.227 RMSE: 3.110 Num. networks: 15\n","Epoch: 5000, Train loss: -0.026 Test loss: -0.040 Ensemble loss: -0.162 RMSE: 3.370 Num. networks: 20\n","FOLD 3:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.414 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.912 Test loss: -0.709 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.721 Test loss:  0.562 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.706 Test loss: -0.789 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.660 Test loss: -0.779 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.858 Test loss: -0.942 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.994 Test loss: -0.875 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.133 Test loss: -0.038 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.597 Test loss: -0.795 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.816 Test loss: -0.851 Ensemble loss: -0.313 RMSE: 2.862 Num. networks: 15\n","Epoch: 5000, Train loss: -0.768 Test loss: -0.926 Ensemble loss: -0.303 RMSE: 2.548 Num. networks: 20\n","FOLD 4:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.555 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.304 Test loss: -0.451 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.039 Test loss: -0.136 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.448 Test loss:  1.257 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.201 Test loss: -0.151 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.684 Test loss: -0.709 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.134 Test loss: -0.255 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.158 Test loss:  0.026 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.018 Test loss: -0.069 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.451 Test loss: -0.492 Ensemble loss: -0.196 RMSE: 3.525 Num. networks: 15\n","Epoch: 5000, Train loss: -0.837 Test loss: -0.860 Ensemble loss: -0.222 RMSE: 3.436 Num. networks: 20\n","FOLD 5:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.488 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.443 Test loss: -0.532 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.065 Test loss: -0.021 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.967 Test loss: -0.846 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.214 Test loss: -0.356 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.232 Test loss: -0.356 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.125 Test loss: -0.080 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.178 Test loss:  0.137 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.201 Test loss:  0.103 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.643 Test loss: -0.704 Ensemble loss: -0.124 RMSE: 3.426 Num. networks: 15\n","Epoch: 5000, Train loss: -0.437 Test loss: -0.506 Ensemble loss: -0.183 RMSE: 3.317 Num. networks: 20\n","FOLD 6:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.458 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.716 Test loss: -0.158 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.198 Test loss:  0.152 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.503 Test loss: -0.603 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.683 Test loss:  0.100 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.424 Test loss: -0.491 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.350 Test loss: -0.438 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  1.582 Test loss:  0.412 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.290 Test loss: -0.334 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.531 Test loss: -0.647 Ensemble loss: -0.252 RMSE: 3.182 Num. networks: 15\n","Epoch: 5000, Train loss:  0.581 Test loss:  2.538 Ensemble loss: -0.145 RMSE: 3.451 Num. networks: 20\n","FOLD 7:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.566 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.698 Test loss: -0.728 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.731 Test loss: -0.707 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.585 Test loss: -0.612 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.776 Test loss:  0.628 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.055 Test loss:  0.066 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.567 Test loss: -0.688 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.608 Test loss:  0.488 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.098 Test loss:  0.014 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.222 Test loss:  0.153 Ensemble loss: -0.068 RMSE: 3.832 Num. networks: 15\n","Epoch: 5000, Train loss:  0.218 Test loss:  0.182 Ensemble loss: -0.094 RMSE: 3.672 Num. networks: 20\n","FOLD 8:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.575 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.624 Test loss:  0.268 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.420 Test loss: -0.234 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.475 Test loss: -0.677 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.489 Test loss:  0.675 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.378 Test loss: -0.197 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.059 Test loss: -0.158 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  1.470 Test loss:  1.876 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.582 Test loss: -0.730 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.324 Test loss: -0.134 Ensemble loss:  0.004 RMSE: 3.894 Num. networks: 15\n","Epoch: 5000, Train loss: -0.699 Test loss: -0.342 Ensemble loss: -0.074 RMSE: 3.766 Num. networks: 20\n","FOLD 9:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.473 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.172 Test loss:  0.023 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.021 Test loss: -0.089 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.321 Test loss: -0.441 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.406 Test loss:  0.267 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.297 Test loss: -0.451 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.864 Test loss: -0.815 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.020 Test loss: -0.082 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.461 Test loss: -0.644 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.038 Test loss: -0.186 Ensemble loss: -0.103 RMSE: 3.466 Num. networks: 15\n","Epoch: 5000, Train loss: -0.538 Test loss:  3.479 Ensemble loss: -0.069 RMSE: 3.638 Num. networks: 20\n","Train log. lik. = -2.105 +/-  0.060\n","Test  log. lik. = -2.104 +/-  0.067\n","Train RMSE      =  3.428 +/-  0.269\n","Test  RMSE      =  3.462 +/-  0.353\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"id":"vFsWDzx8NSry","colab_type":"text"},"source":["# Power dataset"]},{"cell_type":"code","metadata":{"id":"hzdZLChPNQ9X","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":221},"outputId":"612cf23e-df07-4679-88dc-a4ef2ce03f7f","executionInfo":{"status":"ok","timestamp":1558120086760,"user_tz":-60,"elapsed":240887,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}}},"source":["np.random.seed(0)\n","!wget \"https://archive.ics.uci.edu/ml/machine-learning-databases/00294/CCPP.zip\" --no-check-certificate \n","zipped = zipfile.ZipFile(\"CCPP.zip\")\n","data = pd.read_excel(zipped.open('CCPP/Folds5x2_pp.xlsx'), header=0, delimiter=\"\\t\").values\n","np.random.shuffle(data)\n","data.shape"],"execution_count":87,"outputs":[{"output_type":"stream","text":["--2019-05-17 19:08:01--  https://archive.ics.uci.edu/ml/machine-learning-databases/00294/CCPP.zip\n","Resolving archive.ics.uci.edu (archive.ics.uci.edu)... 128.195.10.252\n","Connecting to archive.ics.uci.edu (archive.ics.uci.edu)|128.195.10.252|:443... connected.\n","HTTP request sent, awaiting response... 200 OK\n","Length: 3674852 (3.5M) [application/x-httpd-php]\n","Saving to: ‘CCPP.zip.2’\n","\n","CCPP.zip.2          100%[===================>]   3.50M  3.13MB/s    in 1.1s    \n","\n","2019-05-17 19:08:03 (3.13 MB/s) - ‘CCPP.zip.2’ saved [3674852/3674852]\n","\n"],"name":"stdout"},{"output_type":"execute_result","data":{"text/plain":["(9568, 5)"]},"metadata":{"tags":[]},"execution_count":87}]},{"cell_type":"code","metadata":{"id":"ld95mVAKyvo9","colab_type":"code","colab":{"base_uri":"https://localhost:8080/","height":2125},"outputId":"034e6533-2401-401d-cae3-1d087c1b1869","executionInfo":{"status":"ok","timestamp":1558120198804,"user_tz":-60,"elapsed":352787,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}}},"source":["ensemble = train_mc_dropout(data=data, n_splits=10, burn_in=3000, mix_time=100, num_nets=20,\n","                            num_units=200, learn_rate=1e-1/len(data), weight_decay=100, log_every=500)"],"execution_count":88,"outputs":[{"output_type":"stream","text":["FOLD 0:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.528 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.827 Test loss: -0.866 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.854 Test loss: -0.901 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.914 Test loss: -0.863 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.915 Test loss: -0.938 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.907 Test loss: -0.866 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.719 Test loss: -0.845 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.747 Test loss: -0.853 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.896 Test loss: -0.919 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.858 Test loss: -0.676 Ensemble loss: -0.873 RMSE: 3.952 Num. networks: 15\n","Epoch: 5000, Train loss: -0.934 Test loss: -0.951 Ensemble loss: -0.877 RMSE: 3.946 Num. networks: 20\n","FOLD 1:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.478 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.875 Test loss: -0.903 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.892 Test loss: -0.846 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.913 Test loss: -0.931 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.434 Test loss: -0.778 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.520 Test loss: -0.814 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.923 Test loss: -0.925 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.886 Test loss: -0.919 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.928 Test loss: -0.869 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.733 Test loss: -0.559 Ensemble loss: -0.887 RMSE: 4.040 Num. networks: 15\n","Epoch: 5000, Train loss: -0.884 Test loss: -0.908 Ensemble loss: -0.901 RMSE: 4.041 Num. networks: 20\n","FOLD 2:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.503 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.853 Test loss: -0.759 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.651 Test loss: -0.400 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.289 Test loss: -0.411 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.744 Test loss: -0.485 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.911 Test loss: -0.898 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.902 Test loss: -0.799 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.723 Test loss: -0.802 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.917 Test loss: -0.886 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.878 Test loss: -0.877 Ensemble loss: -0.837 RMSE: 4.399 Num. networks: 15\n","Epoch: 5000, Train loss: -0.347 Test loss: -0.730 Ensemble loss: -0.856 RMSE: 4.273 Num. networks: 20\n","FOLD 3:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.530 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.867 Test loss: -0.873 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.899 Test loss: -0.939 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.878 Test loss: -0.922 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.932 Test loss: -0.956 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.903 Test loss: -0.847 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.730 Test loss: -0.588 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.528 Test loss: -0.737 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.937 Test loss: -0.969 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.764 Test loss: -0.714 Ensemble loss: -0.894 RMSE: 4.093 Num. networks: 15\n","Epoch: 5000, Train loss: -0.835 Test loss: -0.712 Ensemble loss: -0.892 RMSE: 4.111 Num. networks: 20\n","FOLD 4:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.526 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.906 Test loss: -0.851 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.861 Test loss: -0.854 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.926 Test loss: -0.880 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.919 Test loss: -0.886 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.809 Test loss: -0.589 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.541 Test loss: -0.787 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.874 Test loss: -0.861 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.797 Test loss: -0.635 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.944 Test loss: -0.888 Ensemble loss: -0.873 RMSE: 4.378 Num. networks: 15\n","Epoch: 5000, Train loss: -0.935 Test loss: -0.872 Ensemble loss: -0.866 RMSE: 4.347 Num. networks: 20\n","FOLD 5:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.488 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.780 Test loss: -0.870 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.913 Test loss: -0.914 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.631 Test loss: -0.834 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.936 Test loss: -0.946 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.867 Test loss: -0.909 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.930 Test loss: -0.950 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.897 Test loss: -0.934 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.886 Test loss: -0.812 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.914 Test loss: -0.850 Ensemble loss: -0.874 RMSE: 3.968 Num. networks: 15\n","Epoch: 5000, Train loss: -0.596 Test loss: -0.560 Ensemble loss: -0.873 RMSE: 3.995 Num. networks: 20\n","FOLD 6:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.502 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.906 Test loss: -0.873 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.908 Test loss: -0.881 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.751 Test loss: -0.820 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.751 Test loss: -0.349 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.938 Test loss: -0.852 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.829 Test loss: -0.840 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.950 Test loss: -0.888 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.939 Test loss: -0.838 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.797 Test loss: -0.840 Ensemble loss: -0.854 RMSE: 4.375 Num. networks: 15\n","Epoch: 5000, Train loss: -0.881 Test loss: -0.858 Ensemble loss: -0.865 RMSE: 4.368 Num. networks: 20\n","FOLD 7:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.462 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.716 Test loss: -0.836 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.634 Test loss: -0.486 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.935 Test loss: -0.916 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.440 Test loss: -0.352 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.863 Test loss: -0.892 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.900 Test loss: -0.909 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.928 Test loss: -0.923 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.421 Test loss: -0.796 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.920 Test loss: -0.918 Ensemble loss: -0.858 RMSE: 4.120 Num. networks: 15\n","Epoch: 5000, Train loss: -0.844 Test loss: -0.873 Ensemble loss: -0.878 RMSE: 4.064 Num. networks: 20\n","FOLD 8:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.490 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.724 Test loss: -0.792 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.468 Test loss: -0.789 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.641 Test loss: -0.781 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.561 Test loss:  0.004 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.922 Test loss: -0.887 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.868 Test loss: -0.910 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.926 Test loss: -0.933 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.882 Test loss: -0.784 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.805 Test loss: -0.868 Ensemble loss: -0.888 RMSE: 4.257 Num. networks: 15\n","Epoch: 5000, Train loss: -0.937 Test loss: -0.951 Ensemble loss: -0.884 RMSE: 4.275 Num. networks: 20\n","FOLD 9:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.493 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.912 Test loss: -0.854 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.926 Test loss: -0.866 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.614 Test loss: -0.806 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.395 Test loss: -0.207 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.941 Test loss: -0.885 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.660 Test loss: -0.136 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.658 Test loss: -0.810 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.902 Test loss: -0.874 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.878 Test loss: -0.637 Ensemble loss: -0.845 RMSE: 4.366 Num. networks: 15\n","Epoch: 5000, Train loss: -0.786 Test loss: -0.537 Ensemble loss: -0.844 RMSE: 4.342 Num. networks: 20\n","Train log. lik. = -1.957 +/-  0.014\n","Test  log. lik. = -1.964 +/-  0.016\n","Train RMSE      =  4.153 +/-  0.097\n","Test  RMSE      =  4.176 +/-  0.153\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"colab_type":"text","id":"HRPICBiXCegI"},"source":["# Red wine dataset"]},{"cell_type":"code","metadata":{"colab_type":"code","id":"KOqgIBXcCegJ","colab":{"base_uri":"https://localhost:8080/","height":221},"outputId":"ab4b617f-22c9-47c9-e85e-f6a8cb7e4b13","executionInfo":{"status":"ok","timestamp":1558120203240,"user_tz":-60,"elapsed":356964,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}}},"source":["np.random.seed(0)\n","!wget \"https://archive.ics.uci.edu/ml/machine-learning-databases/wine-quality/winequality-red.csv\" --no-check-certificate \n","data = pd.read_csv('winequality-red.csv', header=1, delimiter=';').values\n","data = data[np.random.permutation(np.arange(len(data)))]\n","data.shape"],"execution_count":89,"outputs":[{"output_type":"stream","text":["--2019-05-17 19:09:59--  https://archive.ics.uci.edu/ml/machine-learning-databases/wine-quality/winequality-red.csv\n","Resolving archive.ics.uci.edu (archive.ics.uci.edu)... 128.195.10.252\n","Connecting to archive.ics.uci.edu (archive.ics.uci.edu)|128.195.10.252|:443... connected.\n","HTTP request sent, awaiting response... 200 OK\n","Length: 84199 (82K) [application/x-httpd-php]\n","Saving to: ‘winequality-red.csv.2’\n","\n","\rwinequality-red.csv   0%[                    ]       0  --.-KB/s               \rwinequality-red.csv  47%[========>           ]  39.00K   138KB/s               \rwinequality-red.csv 100%[===================>]  82.23K   291KB/s    in 0.3s    \n","\n","2019-05-17 19:10:00 (291 KB/s) - ‘winequality-red.csv.2’ saved [84199/84199]\n","\n"],"name":"stdout"},{"output_type":"execute_result","data":{"text/plain":["(1598, 12)"]},"metadata":{"tags":[]},"execution_count":89}]},{"cell_type":"code","metadata":{"colab_type":"code","id":"h8gWjBbKCegM","colab":{"base_uri":"https://localhost:8080/","height":2125},"outputId":"be100618-9673-474d-bd0c-150e48f2baf9","executionInfo":{"status":"ok","timestamp":1558120284603,"user_tz":-60,"elapsed":438177,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}}},"source":["ensemble = train_mc_dropout(data=data, n_splits=10, burn_in=3000, mix_time=100, num_nets=20,\n","                            num_units=200, learn_rate=1e-1/len(data), weight_decay=100, log_every=500)"],"execution_count":90,"outputs":[{"output_type":"stream","text":["FOLD 0:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.556 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.178 Test loss:  0.368 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.160 Test loss:  0.348 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.183 Test loss:  0.348 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.186 Test loss:  0.335 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.181 Test loss:  0.323 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.173 Test loss:  0.375 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.180 Test loss:  0.361 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.184 Test loss:  0.357 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.198 Test loss:  0.283 Ensemble loss:  0.309 RMSE: 0.690 Num. networks: 15\n","Epoch: 5000, Train loss:  0.179 Test loss:  0.326 Ensemble loss:  0.303 RMSE: 0.690 Num. networks: 20\n","FOLD 1:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.407 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.191 Test loss:  0.106 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.178 Test loss:  0.083 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.174 Test loss:  0.084 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.192 Test loss:  0.074 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.190 Test loss:  0.069 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.186 Test loss:  0.091 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.183 Test loss:  0.100 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.187 Test loss:  0.064 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.176 Test loss:  0.103 Ensemble loss:  0.069 RMSE: 0.558 Num. networks: 15\n","Epoch: 5000, Train loss:  0.175 Test loss:  0.096 Ensemble loss:  0.068 RMSE: 0.559 Num. networks: 20\n","FOLD 2:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.585 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.176 Test loss:  0.320 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.179 Test loss:  0.380 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.188 Test loss:  0.347 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.184 Test loss:  0.359 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.177 Test loss:  0.341 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.177 Test loss:  0.367 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.186 Test loss:  0.307 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.180 Test loss:  0.289 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.162 Test loss:  0.402 Ensemble loss:  0.293 RMSE: 0.683 Num. networks: 15\n","Epoch: 5000, Train loss:  0.189 Test loss:  0.353 Ensemble loss:  0.292 RMSE: 0.683 Num. networks: 20\n","FOLD 3:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.558 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.179 Test loss:  0.335 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.183 Test loss:  0.444 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.174 Test loss:  0.391 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.183 Test loss:  0.425 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.187 Test loss:  0.518 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.299 Test loss:  0.320 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.181 Test loss:  0.315 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.171 Test loss:  0.355 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.179 Test loss:  0.501 Ensemble loss:  0.318 RMSE: 0.732 Num. networks: 15\n","Epoch: 5000, Train loss:  0.196 Test loss:  0.420 Ensemble loss:  0.314 RMSE: 0.731 Num. networks: 20\n","FOLD 4:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.393 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.170 Test loss:  0.205 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.176 Test loss:  0.212 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.169 Test loss:  0.189 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.173 Test loss:  0.194 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.177 Test loss:  0.229 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.167 Test loss:  0.192 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.179 Test loss:  0.228 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.193 Test loss:  0.241 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.177 Test loss:  0.179 Ensemble loss:  0.174 RMSE: 0.605 Num. networks: 15\n","Epoch: 5000, Train loss:  0.190 Test loss:  0.214 Ensemble loss:  0.173 RMSE: 0.606 Num. networks: 20\n","FOLD 5:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.634 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.184 Test loss:  0.406 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.186 Test loss:  0.399 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.177 Test loss:  0.392 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.184 Test loss:  0.409 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.176 Test loss:  0.333 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.184 Test loss:  0.391 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.194 Test loss:  0.555 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.174 Test loss:  0.411 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.188 Test loss:  0.377 Ensemble loss:  0.317 RMSE: 0.682 Num. networks: 15\n","Epoch: 5000, Train loss:  0.188 Test loss:  0.340 Ensemble loss:  0.319 RMSE: 0.686 Num. networks: 20\n","FOLD 6:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.345 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.193 Test loss:  0.048 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.173 Test loss:  0.124 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.177 Test loss:  0.077 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.168 Test loss:  0.065 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.165 Test loss:  0.054 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.181 Test loss:  0.080 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.171 Test loss:  0.117 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.169 Test loss:  0.228 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.160 Test loss:  0.234 Ensemble loss:  0.082 RMSE: 0.541 Num. networks: 15\n","Epoch: 5000, Train loss:  0.169 Test loss:  0.069 Ensemble loss:  0.081 RMSE: 0.539 Num. networks: 20\n","FOLD 7:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.565 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.189 Test loss:  0.212 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.190 Test loss:  0.224 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.182 Test loss:  0.277 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.182 Test loss:  0.303 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.194 Test loss:  0.196 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.185 Test loss:  0.384 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.169 Test loss:  0.226 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.183 Test loss:  0.327 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.189 Test loss:  0.298 Ensemble loss:  0.233 RMSE: 0.635 Num. networks: 15\n","Epoch: 5000, Train loss:  0.197 Test loss:  0.403 Ensemble loss:  0.232 RMSE: 0.634 Num. networks: 20\n","FOLD 8:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.467 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.188 Test loss:  0.274 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.165 Test loss:  0.226 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.184 Test loss:  0.213 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.175 Test loss:  0.288 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.181 Test loss:  0.229 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.173 Test loss:  0.318 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.180 Test loss:  0.245 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.179 Test loss:  0.199 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.178 Test loss:  0.194 Ensemble loss:  0.212 RMSE: 0.587 Num. networks: 15\n","Epoch: 5000, Train loss:  0.180 Test loss:  0.305 Ensemble loss:  0.213 RMSE: 0.589 Num. networks: 20\n","FOLD 9:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.528 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.179 Test loss:  0.215 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.191 Test loss:  0.224 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.179 Test loss:  0.259 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.182 Test loss:  0.222 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.188 Test loss:  0.222 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.185 Test loss:  0.236 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.194 Test loss:  0.216 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.188 Test loss:  0.183 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.177 Test loss:  0.231 Ensemble loss:  0.186 RMSE: 0.598 Num. networks: 15\n","Epoch: 5000, Train loss:  0.191 Test loss:  0.232 Ensemble loss:  0.191 RMSE: 0.600 Num. networks: 20\n","Train log. lik. =  0.059 +/-  0.008\n","Test  log. lik. = -0.005 +/-  0.079\n","Train RMSE      =  0.615 +/-  0.006\n","Test  RMSE      =  0.632 +/-  0.060\n"],"name":"stdout"}]},{"cell_type":"markdown","metadata":{"colab_type":"text","id":"PVZw0uJzDgdy"},"source":["# Yacht dataset"]},{"cell_type":"code","metadata":{"colab_type":"code","outputId":"84c78c17-d62a-4f4e-998c-48d8950875a6","executionInfo":{"status":"ok","timestamp":1558120287109,"user_tz":-60,"elapsed":440170,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}},"id":"ja_kIet3Dgdz","colab":{"base_uri":"https://localhost:8080/","height":221}},"source":["np.random.seed(0)\n","!wget \"http://archive.ics.uci.edu/ml/machine-learning-databases/00243/yacht_hydrodynamics.data\" --no-check-certificate \n","data = pd.read_csv('yacht_hydrodynamics.data', header=1, delimiter='\\s+').values\n","data = data[np.random.permutation(np.arange(len(data)))]\n","data.shape"],"execution_count":91,"outputs":[{"output_type":"stream","text":["--2019-05-17 19:11:25--  http://archive.ics.uci.edu/ml/machine-learning-databases/00243/yacht_hydrodynamics.data\n","Resolving archive.ics.uci.edu (archive.ics.uci.edu)... 128.195.10.252\n","Connecting to archive.ics.uci.edu (archive.ics.uci.edu)|128.195.10.252|:80... connected.\n","HTTP request sent, awaiting response... 200 OK\n","Length: 11487 (11K) [application/x-httpd-php]\n","Saving to: ‘yacht_hydrodynamics.data.2’\n","\n","\r          yacht_hyd   0%[                    ]       0  --.-KB/s               \ryacht_hydrodynamics 100%[===================>]  11.22K  --.-KB/s    in 0s      \n","\n","2019-05-17 19:11:25 (281 MB/s) - ‘yacht_hydrodynamics.data.2’ saved [11487/11487]\n","\n"],"name":"stdout"},{"output_type":"execute_result","data":{"text/plain":["(306, 7)"]},"metadata":{"tags":[]},"execution_count":91}]},{"cell_type":"code","metadata":{"colab_type":"code","outputId":"1fee603a-5657-4a63-a306-374a96f7d17a","executionInfo":{"status":"ok","timestamp":1558120366739,"user_tz":-60,"elapsed":519654,"user":{"displayName":"Stratis Markou","photoUrl":"","userId":"09754366312766083286"}},"id":"clxNDH6dDgd3","colab":{"base_uri":"https://localhost:8080/","height":2125}},"source":["ensemble = train_mc_dropout(data=data, n_splits=10, burn_in=3000, mix_time=100, num_nets=20,\n","                            num_units=200, learn_rate=1e-1/len(data), weight_decay=100, log_every=500)"],"execution_count":92,"outputs":[{"output_type":"stream","text":["FOLD 0:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.139 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.706 Test loss: -1.005 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.491 Test loss: -0.710 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.675 Test loss: -0.509 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  1.513 Test loss:  0.628 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.405 Test loss: -0.029 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.394 Test loss: -0.688 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.028 Test loss: -0.399 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.224 Test loss:  1.235 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.481 Test loss: -0.823 Ensemble loss: -0.277 RMSE: 3.487 Num. networks: 15\n","Epoch: 5000, Train loss:  0.779 Test loss:  0.720 Ensemble loss: -0.099 RMSE: 4.579 Num. networks: 20\n","FOLD 1:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.544 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.855 Test loss:  0.694 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.104 Test loss: -0.145 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  0.251 Test loss:  0.096 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.199 Test loss: -0.246 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.584 Test loss: -0.534 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  1.140 Test loss:  0.878 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.422 Test loss:  0.240 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.654 Test loss: -0.438 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.410 Test loss: -0.357 Ensemble loss: -0.114 RMSE: 8.262 Num. networks: 15\n","Epoch: 5000, Train loss:  0.834 Test loss:  0.640 Ensemble loss: -0.060 RMSE: 8.372 Num. networks: 20\n","FOLD 2:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.407 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.453 Test loss:  0.393 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.637 Test loss: -0.810 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.494 Test loss: -0.597 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.178 Test loss: -0.069 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.476 Test loss: -0.630 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.175 Test loss:  0.126 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.428 Test loss: -0.569 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.455 Test loss: -0.358 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.378 Test loss: -0.603 Ensemble loss: -0.199 RMSE: 7.344 Num. networks: 15\n","Epoch: 5000, Train loss:  1.263 Test loss:  1.204 Ensemble loss: -0.008 RMSE: 7.201 Num. networks: 20\n","FOLD 3:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.518 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.019 Test loss:  0.025 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.515 Test loss: -0.116 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.686 Test loss: -0.617 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.083 Test loss: -0.134 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.415 Test loss: -0.419 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.456 Test loss: -0.403 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.070 Test loss: -0.176 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.259 Test loss: -0.056 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.028 Test loss:  0.937 Ensemble loss: -0.190 RMSE: 8.652 Num. networks: 15\n","Epoch: 5000, Train loss: -0.637 Test loss: -0.596 Ensemble loss: -0.217 RMSE: 8.740 Num. networks: 20\n","FOLD 4:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.386 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.034 Test loss: -0.125 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.490 Test loss:  0.314 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.600 Test loss:  0.313 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.177 Test loss: -0.020 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.363 Test loss:  0.198 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.471 Test loss: -0.656 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.005 Test loss: -0.131 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.108 Test loss: -0.413 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.096 Test loss: -0.205 Ensemble loss: -0.188 RMSE: 6.770 Num. networks: 15\n","Epoch: 5000, Train loss: -0.369 Test loss: -0.481 Ensemble loss: -0.280 RMSE: 6.598 Num. networks: 20\n","FOLD 5:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.393 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.445 Test loss: -0.534 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.540 Test loss:  0.330 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.611 Test loss: -0.124 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.176 Test loss: -0.283 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.400 Test loss:  0.237 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.423 Test loss: -0.535 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.556 Test loss:  0.455 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.147 Test loss: -0.351 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.416 Test loss: -0.481 Ensemble loss:  0.406 RMSE: 7.862 Num. networks: 15\n","Epoch: 5000, Train loss: -0.484 Test loss: -0.549 Ensemble loss:  0.268 RMSE: 7.167 Num. networks: 20\n","FOLD 6:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.632 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.740 Test loss:  0.647 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.201 Test loss: -0.094 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss:  1.511 Test loss:  0.616 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.513 Test loss: -0.346 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.768 Test loss: -0.104 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.591 Test loss: -0.420 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.184 Test loss:  0.201 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  0.001 Test loss:  0.011 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.132 Test loss: -0.052 Ensemble loss: -0.189 RMSE: 10.377 Num. networks: 15\n","Epoch: 5000, Train loss: -0.339 Test loss: -0.332 Ensemble loss: -0.183 RMSE: 10.288 Num. networks: 20\n","FOLD 7:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.386 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.447 Test loss: -0.447 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.135 Test loss: -0.043 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.570 Test loss: -0.380 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss: -0.623 Test loss: -0.771 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.563 Test loss: -0.626 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss:  0.091 Test loss: -0.230 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss: -0.440 Test loss: -0.545 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss: -0.763 Test loss: -0.863 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.021 Test loss: -0.117 Ensemble loss: -0.275 RMSE: 6.343 Num. networks: 15\n","Epoch: 5000, Train loss: -0.306 Test loss: -0.566 Ensemble loss: -0.169 RMSE: 6.551 Num. networks: 20\n","FOLD 8:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.785 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss:  0.580 Test loss:  0.567 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss: -0.106 Test loss: -0.010 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.288 Test loss: -0.290 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.673 Test loss:  0.639 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss:  0.189 Test loss:  0.297 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.466 Test loss: -0.442 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.514 Test loss:  0.562 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  1.738 Test loss:  1.610 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss:  0.543 Test loss:  2.109 Ensemble loss:  0.421 RMSE: 11.683 Num. networks: 15\n","Epoch: 5000, Train loss: -0.567 Test loss: -0.466 Ensemble loss:  0.323 RMSE: 11.069 Num. networks: 20\n","FOLD 9:\n","Epoch:    0, Train loss:  0.500 Test loss:  0.926 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch:  500, Train loss: -0.416 Test loss: -0.163 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1000, Train loss:  0.168 Test loss:  0.387 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 1500, Train loss: -0.493 Test loss: -0.082 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2000, Train loss:  0.593 Test loss:  0.593 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 2500, Train loss: -0.642 Test loss: -0.154 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3000, Train loss: -0.124 Test loss:  0.174 Ensemble loss:    nan RMSE: nan Num. networks:  0\n","Epoch: 3500, Train loss:  0.306 Test loss:  0.254 Ensemble loss:    nan RMSE: nan Num. networks:  5\n","Epoch: 4000, Train loss:  1.722 Test loss:  1.612 Ensemble loss:    nan RMSE: nan Num. networks: 10\n","Epoch: 4500, Train loss: -0.685 Test loss:  0.047 Ensemble loss:  0.254 RMSE: 11.307 Num. networks: 15\n","Epoch: 5000, Train loss: -0.133 Test loss:  2.512 Ensemble loss:  0.278 RMSE: 10.990 Num. networks: 20\n","Train log. lik. = -2.695 +/-  0.197\n","Test  log. lik. = -2.704 +/-  0.203\n","Train RMSE      =  8.352 +/-  0.484\n","Test  RMSE      =  8.155 +/-  2.028\n"],"name":"stdout"}]},{"cell_type":"code","metadata":{"id":"PtFBUJoAavT6","colab_type":"code","colab":{}},"source":[""],"execution_count":0,"outputs":[]}]}