# data: 24x3 Float64 matrix of test observations (one row per observation,
# three variables per row). Values look like standard-normal draws -- TODO
# confirm against the generating script if exact provenance matters.
data = [ 0.837698   0.49452   2.54352 
        -0.294096  -0.39636   0.728619
        -1.62089   -0.44919   1.20592 
        -1.06458   -0.68214  -1.12841 
         2.14341    0.7309    0.644968
        -0.284139  -1.133     1.98615 
         1.19879    2.55633  -0.526461
        -0.032277   0.11701  -0.249265
        -1.02516   -0.44665   2.50556 
        -0.515272  -0.578     0.515139
         0.259474  -1.24193   0.105051
         0.178546  -0.80547  -0.016838
        -0.607696  -0.21319  -1.40657 
         0.372248   0.93341  -0.667086
        -0.099814   0.52698  -0.253867
         0.743166  -0.79375   2.11131 
         0.109262  -1.28021  -0.415184
         0.499346  -0.95897  -2.24336 
        -0.191825  -0.59756  -0.63292 
        -1.98255   -1.5936   -0.935766
        -0.317612   1.33143  -0.46866 
         0.666652  -0.81507   0.370959
        -0.761136   0.10966  -0.997161
        -1.09972    0.28247  -0.846566 ]

# wt: 1x3 row matrix of weights (space-separated literal deliberately builds a
# Matrix, not a Vector -- presumably one weight per column of `data`; confirm
# against the consuming test).
wt = [ 0.4 1.0 0.6 ]

# Uniform weighting: one equal entry per observation (row) of `data`, scaled
# through `normalize` (defined elsewhere or from LinearAlgebra -- untouched here).
uniform_weights = normalize(fill(1.0, size(data, 1)))

# rwt: 24 positive integer repetition/frequency-style weights, one per row of
# `data` (converted to Float64 and normalized into `w` below).
rwt = [ 91
        65
        57
        34
        83
        70
        74
        26
        37
        21
        62
        22
        91
        83
        89
        39
        20
        83
        36
        75
        43
        78
        83
        17 ]
# Integer weights promoted element-wise to Float64, then normalized.
# (Broadcast `Float64.(rwt)` yields the same Vector{Float64} as
# `convert(Array{Float64}, rwt)`.)
w = normalize(Float64.(rwt))

# bigdata: 50x25 Float64 matrix; every entry lies in (0, 1) with three decimal
# places -- presumably pre-generated rand() output rounded for the fixture;
# TODO confirm provenance if it matters.
bigdata = [
    0.231  0.059  0.957  0.172  0.779  0.093  0.029  0.643  0.104  0.468  0.315  0.209  0.582  0.044  0.309  0.902  0.528  0.974  0.117  0.468  0.319  0.706  0.648  0.368  0.447
    0.828  0.791  0.316  0.88   0.293  0.359  0.984  0.109  0.439  0.495  0.088  0.13   0.563  0.448  0.728  0.683  0.655  0.26   0.915  0.43   0.756  0.157  0.616  0.758  0.613
    0.949  0.904  0.083  0.591  0.185  0.942  0.215  0.504  0.745  0.219  0.899  0.61   0.335  0.85   0.521  0.266  0.933  0.124  0.401  0.558  0.737  0.011  0.384  0.785  0.031
    0.203  0.038  0.285  0.13   0.174  0.344  0.646  0.065  0.384  0.84   0.844  0.235  0.959  0.648  0.537  0.514  0.423  0.19   0.855  0.72   0.195  0.009  0.775  0.565  0.177
    0.91   0.336  0.797  0.165  0.498  0.488  0.649  0.571  0.31   0.189  0.189  0.519  0.949  0.993  0.372  0.202  0.231  0.392  0.631  0.53   0.75   0.561  0.36   0.698  0.857
    0.525  0.846  0.231  0.301  0.38   0.469  0.934  0.422  0.732  0.166  0.528  0.176  0.402  0.654  0.513  0.164  0.27   0.123  0.187  0.937  0.188  0.93   0.828  0.521  0.86 
    0.552  0.597  0.343  0.952  0.993  0.749  0.086  0.549  0.295  0.344  0.723  0.431  0.33   0.14   0.091  0.336  0.419  0.262  0.071  0.368  0.172  0.305  0.989  0.425  0.649
    0.4    0.265  0.412  0.844  0.923  0.83   0.99   0.815  0.942  0.028  0.655  0.593  0.981  0.36   0.753  0.863  0.782  0.558  0.352  0.818  0.438  0.989  0.609  0.926  0.696
    0.961  0.658  0.202  0.362  0.153  0.443  0.48   0.297  0.075  0.381  0.923  0.496  0.068  0.752  0.12   0.802  0.279  0.979  0.61   0.408  0.076  0.709  0.816  0.97   0.872
    0.915  0.179  0.402  0.696  0.003  0.332  0.33   0.91   0.318  0.573  0.635  0.291  0.8    0.515  0.305  0.531  0.607  0.133  0.041  0.725  0.911  0.413  0.256  0.463  0.506
    0.643  0.276  0.788  0.951  0.208  0.804  0.263  0.426  0.575  0.851  0.347  0.902  0.969  0.775  0.628  0.054  0.382  0.478  0.25   0.532  0.432  0.578  0.11   0.203  0.479
    0.479  0.387  0.059  0.287  0.065  0.295  0.527  0.003  0.605  0.187  0.794  0.18   0.396  0.228  0.363  0.099  0.145  0.703  0.894  0.603  0.141  0.099  0.172  0.89   0.546
    0.18   0.921  0.625  0.656  0.865  0.296  0.124  0.349  0.551  0.193  0.494  0.325  0.013  0.14   0.514  0.251  0.115  0.09   0.307  0.015  0.315  0.373  0.71   0.193  0.85 
    0.39   0.978  0.906  0.319  0.873  0.368  0.919  0.004  0.045  0.74   0.806  0.453  0.76   0.219  0.869  0.164  0.809  0.472  0.975  0.201  0.454  0.954  0.429  0.569  0.913
    0.094  0.622  0.213  0.197  0.817  0.941  0.056  0.605  0.122  0.435  0.636  0.823  0.038  0.388  0.592  0.27   0.418  0.862  0.964  0.803  0.466  0.49   0.592  0.831  0.905
    0.886  0.169  0.444  0.011  0.663  0.293  0.613  0.511  0.694  0.286  0.401  0.584  0.194  0.882  0.541  0.228  0.053  0.724  0.039  0.82   0.537  0.955  0.467  0.31   0.867
    0.735  0.282  0.53   0.397  0.961  0.297  0.065  0.201  0.083  0.932  0.09   0.363  0.53   0.33   0.967  0.6    0.794  0.36   0.065  0.297  0.619  0.954  0.553  0.313  0.543
    0.695  0.247  0.7    0.268  0.211  0.198  0.155  0.567  0.876  0.997  0.103  0.228  0.304  0.698  0.898  0.771  0.83   0.663  0.541  0.94   0.252  0.88   0.049  0.247  0.3  
    0.917  0.214  0.162  0.285  0.929  0.178  0.501  0.321  0.606  0.621  0.238  0.745  0.494  0.774  0.928  0.66   0.193  0.917  0.213  0.497  0.061  0.204  0.326  0.371  0.751
    0.733  0.791  0.396  0.067  0.905  0.481  0.246  0.542  0.633  0.352  0.133  0.227  0.1    0.214  0.115  0.474  0.307  0.53   0.132  0.684  0.168  0.813  0.476  0.865  0.988
    0.771  0.507  0.799  0.744  0.971  0.706  0.95   0.714  0.663  0.708  0.025  0.568  0.683  0.689  0.755  0.746  0.759  0.404  0.564  0.201  0.59   0.59   0.289  0.593  0.2  
    0.796  0.09   0.129  0.104  0.283  0.998  0.079  0.86   0.734  0.525  0.934  0.178  0.436  0.996  0.149  0.808  0.822  0.961  0.287  0.428  0.597  0.661  0.263  0.429  0.887
    0.237  0.238  0.366  0.796  0.427  0.849  0.899  0.814  0.571  0.992  0.521  0.669  0.77   0.975  0.465  0.313  0.848  0.527  0.859  0.307  0.005  0.756  0.79   0.383  0.141
    0.109  0.418  0.831  0.863  0.221  0.705  0.91   0.731  0.132  0.332  0.689  0.233  0.695  0.174  0.216  0.781  0.189  0.301  0.842  0.423  0.322  0.16   0.754  0.573  0.243
    0.412  0.59   0.222  0.151  0.653  0.928  0.858  0.292  0.592  0.235  0.574  0.279  0.579  0.595  0.258  0.37   0.948  0.313  0.046  0.341  0.72   0.715  0.298  0.086  0.598
    0.278  0.276  0.966  0.497  0.41   0.979  0.923  0.249  0.326  0.544  0.38   0.505  0.369  0.329  0.761  0.957  0.757  0.632  0.884  0.537  0.435  0.054  0.676  0.145  0.459
    0.369  0.706  0.034  0.595  0.788  0.652  0.15   0.219  0.576  0.196  0.079  0.212  0.465  0.636  0.758  0.244  0.403  0.022  0.748  0.634  0.484  0.466  0.324  0.083  0.291
    0.105  0.179  0.385  0.055  0.829  0.311  0.593  0.911  0.761  0.23   0.597  0.851  0.586  0.133  0.034  0.831  0.378  0.342  0.057  0.428  0.778  0.625  0.628  0.927  0.959
    0.853  0.543  0.402  0.721  0.109  0.947  0.053  0.577  0.623  0.133  0.598  0.72   0.7    0.759  0.763  0.556  0.825  0.112  0.835  0.14   0.427  0.173  0.025  0.09   0.126
    0.055  0.973  0.188  0.014  0.523  0.656  0.11   0.332  0.819  0.086  0.287  0.13   0.917  0.3    0.519  0.171  0.646  0.014  0.376  0.462  0.865  0.771  0.741  0.08   0.105
    0.118  0.822  0.953  0.399  0.244  0.026  0.438  0.3    0.765  0.179  0.621  0.259  0.781  0.454  0.347  0.121  0.203  0.487  0.677  0.982  0.453  0.035  0.164  0.378  0.828
    0.809  0.225  0.838  0.32   0.225  0.672  0.209  0.71   0.06   0.474  0.746  0.126  0.63   0.928  0.003  0.156  0.073  0.194  0.833  0.235  0.035  0.94   0.858  0.785  0.351
    0.025  0.172  0.691  0.962  0.701  0.943  0.601  0.332  0.191  0.201  0.764  0.517  0.127  0.706  0.473  0.579  0.136  0.579  0.184  0.961  0.601  0.697  0.12   0.26   0.506
    0.395  0.715  0.395  0.307  0.16   0.497  0.876  0.507  0.055  0.787  0.353  0.367  0.691  0.003  0.887  0.942  0.642  0.815  0.004  0.102  0.942  0.5    0.609  0.694  0.698
    0.367  0.631  0.486  0.475  0.931  0.44   0.816  0.493  0.231  0.12   0.637  0.291  0.321  0.839  0.689  0.822  0.444  0.433  0.11   0.327  0.858  0.758  0.986  0.519  0.794
    0.717  0.631  0.143  0.525  0.314  0.283  0.324  0.595  0.398  0.617  0.996  0.915  0.604  0.721  0.178  0.345  0.149  0.383  0.215  0.052  0.727  0.651  0.894  0.549  0.51 
    0.16   0.045  0.436  0.674  0.467  0.038  0.242  0.899  0.526  0.097  0.871  0.057  0.845  0.486  0.378  0.48   0.532  0.735  0.967  0.4    0.853  0.578  0.937  0.432  0.793
    0.342  0.509  0.557  0.241  0.151  0.405  0.668  0.255  0.3    0.937  0.192  0.204  0.516  0.244  0.379  0.473  0.34   0.213  0.809  0.335  0.536  0.777  0.167  0.724  0.253
    0.516  0.925  0.96   0.712  0.57   0.793  0.528  0.912  0.739  0.477  0.365  0.306  0.969  0.368  0.18   0.703  0.425  0.525  0.889  0.553  0.325  0.612  0.264  0.607  0.185
    0.691  0.251  0.734  0.521  0.615  0.529  0.091  0.928  0.123  0.465  0.38   0.598  0.34   0.67   0.192  0.786  0.585  0.743  0.487  0.445  0.38   0.193  0.036  0.992  0.99 
    0.158  0.038  0.621  0.153  0.682  0.747  0.205  0.717  0.209  0.265  0.106  0.626  0.288  0.762  0.877  0.023  0.046  0.312  0.455  0.861  0.118  0.476  0.647  0.03   0.682
    0.546  0.542  0.957  0.541  0.629  0.477  0.341  0.254  0.772  0.263  0.797  0.235  0.875  0.524  0.107  0.669  0.63   0.083  0.399  0.064  0.268  0.917  0.129  0.12   0.113
    0.858  0.371  0.171  0.868  0.271  0.934  0.72   0.675  0.017  0.299  0.666  0.47   0.541  0.627  0.364  0.579  0.876  0.223  0.441  0.503  0.979  0.832  0.567  0.075  0.037
    0.587  0.633  0.244  0.791  0.257  0.782  0.66   0.319  0.227  0.371  0.817  0.782  0.938  0.831  0.831  0.135  0.504  0.437  0.785  0.128  0.594  0.799  0.985  0.756  0.471
    0.827  0.408  0.469  0.521  0.712  0.337  0.199  0.127  0.034  0.884  0.003  0.045  0.604  0.433  0.69   0.234  0.429  0.749  0.045  0.663  0.066  0.714  0.152  0.92   0.354
    0.157  0.239  0.192  0.041  0.342  0.585  0.033  0.937  0.682  0.354  0.978  0.282  0.952  0.363  0.572  0.652  0.06   0.748  0.916  0.734  0.073  0.325  0.386  0.576  0.664
    0.184  0.877  0.863  0.514  0.115  0.169  0.713  0.359  0.171  0.755  0.054  0.069  0.925  0.904  0.541  0.359  0.714  0.011  0.15   0.278  0.726  0.72   0.591  0.132  0.195
    0.236  0.436  0.73   0.179  0.953  0.533  0.69   0.28   0.713  0.353  0.596  0.49   0.774  0.343  0.712  0.128  0.026  0.231  0.018  0.053  0.566  0.107  0.821  0.966  0.516
    0.74   0.989  0.963  0.24   0.602  0.354  0.901  0.454  0.671  0.556  0.565  0.513  0.255  0.036  0.055  0.918  0.142  0.973  0.382  0.155  0.346  0.701  0.201  0.26   0.149
    0.468  0.181  0.096  0.156  0.14   0.302  0.87   0.352  0.162  0.01   0.87   0.005  0.207  0.693  0.74   0.992  0.017  0.349  0.962  0.575  0.539  0.812  0.868  0.886  0.35 
]

# Row weights for `data`: every observation weighted 1.0 except the 7th,
# which is weighted 3 (see `repl` below, where that row appears three times).
w1 = fill(1.0, size(data, 1))
w1[7] = 3.0

# repl: 26x3 matrix -- identical to `data` except row 7 appears three times
# (rows 7-9 here), i.e. `data` materialized with integer weight w1[7] = 3.
# Presumably used to check that weighted stats on `data` match unweighted
# stats on `repl`.
repl = [ 0.837698   0.49452   2.54352 
        -0.294096  -0.39636   0.728619
        -1.62089   -0.44919   1.20592 
        -1.06458   -0.68214  -1.12841 
         2.14341    0.7309    0.644968
        -0.284139  -1.133     1.98615 
         1.19879    2.55633  -0.526461
         1.19879    2.55633  -0.526461
         1.19879    2.55633  -0.526461
        -0.032277   0.11701  -0.249265
        -1.02516   -0.44665   2.50556 
        -0.515272  -0.578     0.515139
         0.259474  -1.24193   0.105051
         0.178546  -0.80547  -0.016838
        -0.607696  -0.21319  -1.40657 
         0.372248   0.93341  -0.667086
        -0.099814   0.52698  -0.253867
         0.743166  -0.79375   2.11131 
         0.109262  -1.28021  -0.415184
         0.499346  -0.95897  -2.24336 
        -0.191825  -0.59756  -0.63292 
        -1.98255   -1.5936   -0.935766
        -0.317612   1.33143  -0.46866 
         0.666652  -0.81507   0.370959
        -0.761136   0.10966  -0.997161
        -1.09972    0.28247  -0.846566 ]
 
# Unit weight for each of the 26 rows of `repl`.
rw = fill(1.0, size(repl, 1))

# nan_test_data: 25x10 matrix whose entries are all in {-1.0, 0.0, 1.0}, with
# one row pattern repeated many times -- the name suggests it is built to
# provoke degenerate/NaN cases in the statistic under test; confirm against
# the consuming test.
nan_test_data = [ 0.0  -1.0   1.0   0.0  -1.0  -1.0   1.0  -1.0  -1.0  -1.0
                  1.0  -1.0  -1.0  -1.0  -1.0  -1.0  -1.0   0.0   1.0   1.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                  1.0  -1.0   0.0   1.0   0.0   1.0   0.0   0.0   0.0   0.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                  0.0   1.0  -1.0   0.0  -1.0   1.0   1.0   1.0   1.0   0.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                  0.0   1.0   0.0   0.0   0.0   0.0   1.0   0.0  -1.0   0.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                 -1.0  -1.0  -1.0   0.0   0.0   1.0   1.0   0.0   1.0  -1.0
                  0.0   0.0   1.0   1.0   1.0  -1.0   0.0  -1.0   0.0  -1.0
                 -1.0  -1.0  -1.0  -1.0  -1.0  -1.0  -1.0  -1.0  -1.0  -1.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                  0.0   1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0  -1.0  -1.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                  0.0   0.0   0.0   0.0   0.0   0.0   1.0   1.0   1.0   0.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                 -1.0   1.0   1.0   0.0  -1.0   1.0   0.0   1.0   0.0   0.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                  1.0   0.0   0.0   1.0   0.0   1.0   0.0  -1.0   0.0  -1.0
                 -1.0   1.0   1.0  -1.0  -1.0  -1.0   1.0   0.0  -1.0   0.0
                  1.0  -1.0   0.0   1.0   0.0   1.0   0.0   0.0   0.0   0.0 ]

# nan_test_wt: 25 positive Float64 weights, one per row of `nan_test_data`.
nan_test_wt = [0.02831793460992489,0.004744050454818514,0.02822019260230257,0.005236932428348762,0.0492706652801098,0.006454522058030116,0.12219757929079839,0.044007167153604775,0.1322092637943642,0.018847091843058467,0.03117301149825311,0.027270955184623146,0.04017412455447496,0.0005958465507276494,0.019037064090875612,0.050632029210891875,0.07630930567408355,0.009961180834579806,0.02822019260230257,0.041029418374897736,0.009204977191773241,0.08762854701121198,0.023362497542365402,0.07330510717709664,0.04259034298648226]
