Agarwal committed on
Commit 9fa8ff6
1 Parent(s): e38260c
.ipynb_checkpoints/README-checkpoint.md ADDED
@@ -0,0 +1,6 @@
+ This repository contains trained neural networks that can be used to predict the steady-state temperature profile.
+
+ Step 1: Define the simulation parameters in my_simulation_parameters.txt
+ Step 2: Run calculate_profiles.py. The output is as follows:
+ - the depth profile (first column) and the temperature profile (second column)
+ - a corresponding plot of the temperature profile
.ipynb_checkpoints/app-checkpoint.py ADDED
@@ -0,0 +1,63 @@
+ import streamlit as st
+
+ from matplotlib import pyplot as plt
+ from utils import *  # also provides np and pickle (imported inside utils)
+ import warnings
+
+ raq_ra = st.number_input("# raq/ra ", value=None, placeholder="between 0 and 10")
+ st.write("raq/ra = ", raq_ra)
+
+ fkt = st.number_input("# FKT ", value=None, placeholder="between 1e+6 and 1e+10")
+ st.write("FKT = ", fkt)
+
+ fkv = st.number_input("# FKV ", value=None, placeholder="between 0 and 100")
+ st.write("FKV = ", fkv)
+
+ num_points = st.number_input("# number of profile points ", value=None, placeholder="e.g. 128")
+ st.write("number of profile points = ", num_points)
+
+ # wait until all four inputs have been provided
+ if None in (raq_ra, fkt, fkv, num_points):
+     st.stop()
+
+ with open('numpy_networks/mlp_[256, 256, 256, 256].pkl', 'rb') as file:
+     mlp = pickle.load(file)
+
+ r_list = [raq_ra]
+ t_list = [fkt]
+ v_list = [fkv]
+
+ for i in range(len(r_list)):
+     if r_list[i] < 0 or r_list[i] > 9.5:
+         warnings.warn('RaQ/Ra is outside the range of the training dataset')
+     if t_list[i] < 1e+6 or t_list[i] > 5e+9:
+         warnings.warn('FKT is outside the range of the training dataset')
+     if v_list[i] < 1 or v_list[i] > 95:
+         warnings.warn('FKV is outside the range of the training dataset')
+
+ y_prof = np.linspace(0, 1, int(num_points))[::-1]
+
+ ### calculates temperature profile ###
+ x_in = get_input(r_list, t_list, v_list, y_prof)
+ y_pred_nn_pointwise = get_profile(x_in, mlp, num_sims=len(r_list), num_points=int(num_points))
+ ### calculates temperature profile ###
+
+ ### writes out temperature profile ###
+ st.write("Depth", "Temperature")
+ for i in range(len(r_list)):
+     for j in range(len(y_prof)):
+         st.write(str(y_prof[j]), " ", str(y_pred_nn_pointwise[i, j]), "\n")
+ ### writes out temperature profile ###
+
+
+ ### plots temperature profile ###
+ for i in range(len(r_list)):
+     fig = plt.figure()
+     plt.plot(y_pred_nn_pointwise[i, :], y_prof, 'k-', linewidth=3.0, label="pointwise neural network")
+     plt.ylim([1, 0])
+     plt.xlabel("Temperature")
+     plt.ylabel("Depth")
+     plt.legend()
+     plt.grid()
+     st.pyplot(fig)
+ ### plots temperature profile ###
.ipynb_checkpoints/calculate_profiles-checkpoint.py ADDED
@@ -0,0 +1,78 @@
+ from matplotlib import pyplot as plt
+ from utils import *  # also provides np and pickle (imported inside utils)
+ import warnings
+
+ #### Define outputs ####
+ write_file = True
+ plot_profile = True
+ #### Define outputs ####
+
+ with open('numpy_networks/mlp_[256, 256, 256, 256].pkl', 'rb') as file:
+     mlp = pickle.load(file)
+
+ f_nn = "my_simulation_parameters.txt"
+ with open(f_nn) as fw:
+     lines = fw.readlines()
+
+ for line in lines:
+     l = line.rstrip()
+     if "r_list" in l:
+         if not l[-1] == ",":
+             raise Exception("Ensure there is a comma after last parameter value in " + f_nn)
+         r_list = [float(p) for p in l.split("=")[1].split(",")[:-1]]
+     elif "t_list" in l:
+         if not l[-1] == ",":
+             raise Exception("Ensure there is a comma after last parameter value in " + f_nn)
+         t_list = [float(p) for p in l.split("=")[1].split(",")[:-1]]
+     elif "v_list" in l:
+         if not l[-1] == ",":
+             raise Exception("Ensure there is a comma after last parameter value in " + f_nn)
+         v_list = [float(p) for p in l.split("=")[1].split(",")[:-1]]
+
+ # the parentheses matter here: `not` binds tighter than `and`
+ if not (len(r_list) == len(v_list) and len(r_list) == len(t_list)):
+     raise Exception("Ensure equal number of values for all parameters in " + f_nn)
+
+ for i in range(len(r_list)):
+     if r_list[i] < 0 or r_list[i] > 9.5:
+         warnings.warn('RaQ/Ra is outside the range of the training dataset')
+     if t_list[i] < 1e+6 or t_list[i] > 5e+9:
+         warnings.warn('FKT is outside the range of the training dataset')
+     if v_list[i] < 1 or v_list[i] > 95:
+         warnings.warn('FKV is outside the range of the training dataset')
+
+ ### calculates y points ###
+ num_points = 128
+ y_prof = np.linspace(0, 1, num_points)[::-1]
+ ### calculates y points ###
+
+
+ ### calculates temperature profile ###
+ x_in = get_input(r_list, t_list, v_list, y_prof)
+ y_pred_nn_pointwise = get_profile(x_in, mlp, num_sims=len(r_list))
+ ### calculates temperature profile ###
+
+
+ ### writes out temperature profile ###
+ if write_file:
+     for i in range(len(r_list)):
+         fname = "outputs/profile_raq_ra" + str(r_list[i]) + "_fkt" + str(t_list[i]) + "_fkv" + str(v_list[i])
+         with open(fname + ".txt", "w") as f:
+             for j in range(len(y_prof)):
+                 f.write(str(y_prof[j]) + " " + str(y_pred_nn_pointwise[i, j]) + "\n")
+ ### writes out temperature profile ###
+
+
+ ### plots temperature profile ###
+ if plot_profile:
+     for i in range(len(r_list)):
+         fname = "outputs/profile_raq_ra" + str(r_list[i]) + "_fkt" + str(t_list[i]) + "_fkv" + str(v_list[i])
+         plt.figure()
+         plt.plot(y_pred_nn_pointwise[i, :], y_prof, 'k-', linewidth=3.0, label="pointwise neural network")
+         plt.ylim([1, 0])
+         plt.xlabel("Temperature")
+         plt.ylabel("Depth")
+         plt.legend()
+         plt.grid()
+         plt.savefig(fname + ".png")
+ ### plots temperature profile ###
.ipynb_checkpoints/utils-checkpoint.py ADDED
@@ -0,0 +1,75 @@
+ import numpy as np
+ import pickle
+
+ def selu(x):
+     # SELU activation with its standard alpha/scale constants
+     alpha = 1.6732632423543772848170429916717
+     scale = 1.0507009873554804934193349852946
+     return scale*( np.maximum(0, x) + np.minimum(alpha*(np.exp(x) - 1), 0) )
+
+ # min-max scaling to [0, 1]: RaQ/Ra linearly, FKT and FKV in log10 space,
+ # using the extrema of the training dataset
+ def non_dimensionalize_raq(x):
+     return (x - 0.12624371)/(9.70723344 - 0.12624371)
+
+ def non_dimensionalize_fkt(x):
+     return (np.log10(x) - 6.00352841978384)/(9.888820429862925 - 6.00352841978384)
+
+ def non_dimensionalize_fkv(x):
+     return (np.log10(x) - 0.005251646002323797)/(1.9927988938926755 - 0.005251646002323797)
+
+ def dimensionalize_raq(x):
+     return x*(9.70723344 - 0.12624371) + 0.12624371
+
+ def dimensionalize_fkt(x):
+     return 10**(x*(9.888820429862925 - 6.00352841978384) + 6.00352841978384)
+
+ def dimensionalize_fkv(x):
+     return 10**(x*(1.9927988938926755 - 0.005251646002323797) + 0.005251646002323797)
+
+ def get_input(raq_ra, fkt, fkv, y_prof):
+     # build the (num_sims*num_points, 4) network input: one row per
+     # (simulation, depth point) pair of non-dimensionalized parameters
+
+     x = np.zeros((len(raq_ra)*len(y_prof), 4))
+
+     cntr = 0
+     for i in range(len(raq_ra)):
+         for j in range(len(y_prof)):
+             x[cntr, 0] = non_dimensionalize_raq(raq_ra[i])
+             x[cntr, 1] = non_dimensionalize_fkt(fkt[i])
+             x[cntr, 2] = non_dimensionalize_fkv(fkv[i])
+             x[cntr, 3] = y_prof[j]
+             cntr += 1
+
+     return x
+
+ def get_profile(inp, mlp, num_sims=1, num_points=128):
+     # forward pass through the pickled MLP: mlp is a list of
+     # (weights, bias) tuples, one per layer
+
+     num_layers = len(mlp) - 1
+     y_pred = inp
+     res = []
+     for l in range(num_layers + 1):
+
+         y_pred = y_pred @ mlp[l][0].T + mlp[l][1]
+
+         # re-concatenate the raw input before the final layer
+         if l in [num_layers - 1]:
+             y_pred = np.concatenate((inp, y_pred), axis=-1)
+
+         # hidden layers: add dense residual connections, then SELU
+         if l != num_layers:
+             for r in res:
+                 y_pred += r
+
+             y_pred = selu(y_pred)
+             res.append(y_pred)
+
+     # pin the boundary temperatures: T = 1 at y = 1 and T = 0 at y = 0
+     y_pred = y_pred.reshape(num_sims, num_points)
+     y_pred[:, 0] = 1.
+     y_pred[:, -1] = 0.
+
+     return y_pred
README.md CHANGED
@@ -1,13 +1,6 @@
- ---
- title: Steadystate Mantle
- emoji: 🐠
- colorFrom: red
- colorTo: purple
- sdk: streamlit
- sdk_version: 1.35.0
- app_file: app.py
- pinned: false
- license: mit
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ This repository contains trained neural networks that can be used to predict the steady-state temperature profile.
+
+ Step 1: Define the simulation parameters in my_simulation_parameters.txt (see the example below)
+ Step 2: Run calculate_profiles.py. The output is as follows:
+ - the depth profile (first column) and the temperature profile (second column)
+ - a corresponding plot of the temperature profile
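
For example, a minimal my_simulation_parameters.txt could look like this (illustrative values; calculate_profiles.py requires a trailing comma after the last value of each list, and the three lists must have equal length):

    r_list = 5.0, 7.5,
    t_list = 1e+8, 1e+9,
    v_list = 10.0, 25.0,

Running python calculate_profiles.py then writes one profile text file and one plot per parameter triple into outputs/.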
app.py CHANGED
@@ -1,10 +1,63 @@
  import streamlit as st
 
- number = st.number_input("# raq/ra ", value=None, placeholder="betweel 0 and 10")
- st.write("raq/ra = ", number)
-
- number = st.number_input("# FKT ", value=None, placeholder="betweel 1e+6 and 1e+10")
- st.write("FKT = ", number)
-
- number = st.number_input("# FKV ", value=None, placeholder="betweel 0 and 100")
- st.write("FKV = ", number)
+ from matplotlib import pyplot as plt
+ from utils import *  # also provides np and pickle (imported inside utils)
+ import warnings
+
+ raq_ra = st.number_input("# raq/ra ", value=None, placeholder="between 0 and 10")
+ st.write("raq/ra = ", raq_ra)
+
+ fkt = st.number_input("# FKT ", value=None, placeholder="between 1e+6 and 1e+10")
+ st.write("FKT = ", fkt)
+
+ fkv = st.number_input("# FKV ", value=None, placeholder="between 0 and 100")
+ st.write("FKV = ", fkv)
+
+ num_points = st.number_input("# number of profile points ", value=None, placeholder="e.g. 128")
+ st.write("number of profile points = ", num_points)
+
+ # wait until all four inputs have been provided
+ if None in (raq_ra, fkt, fkv, num_points):
+     st.stop()
+
+ with open('numpy_networks/mlp_[256, 256, 256, 256].pkl', 'rb') as file:
+     mlp = pickle.load(file)
+
+ r_list = [raq_ra]
+ t_list = [fkt]
+ v_list = [fkv]
+
+ for i in range(len(r_list)):
+     if r_list[i] < 0 or r_list[i] > 9.5:
+         warnings.warn('RaQ/Ra is outside the range of the training dataset')
+     if t_list[i] < 1e+6 or t_list[i] > 5e+9:
+         warnings.warn('FKT is outside the range of the training dataset')
+     if v_list[i] < 1 or v_list[i] > 95:
+         warnings.warn('FKV is outside the range of the training dataset')
+
+ y_prof = np.linspace(0, 1, int(num_points))[::-1]
+
+ ### calculates temperature profile ###
+ x_in = get_input(r_list, t_list, v_list, y_prof)
+ y_pred_nn_pointwise = get_profile(x_in, mlp, num_sims=len(r_list), num_points=int(num_points))
+ ### calculates temperature profile ###
+
+ ### writes out temperature profile ###
+ st.write("Depth", "Temperature")
+ for i in range(len(r_list)):
+     for j in range(len(y_prof)):
+         st.write(str(y_prof[j]), " ", str(y_pred_nn_pointwise[i, j]), "\n")
+ ### writes out temperature profile ###
+
+
+ ### plots temperature profile ###
+ for i in range(len(r_list)):
+     fig = plt.figure()
+     plt.plot(y_pred_nn_pointwise[i, :], y_prof, 'k-', linewidth=3.0, label="pointwise neural network")
+     plt.ylim([1, 0])
+     plt.xlabel("Temperature")
+     plt.ylabel("Depth")
+     plt.legend()
+     plt.grid()
+     st.pyplot(fig)
+ ### plots temperature profile ###
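
The Space serves this script with Streamlit (the metadata block removed from README.md above pinned sdk_version 1.35.0). To try it locally, with streamlit, matplotlib and numpy installed, run `streamlit run app.py` from the repository root.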
calculate_profiles.py ADDED
@@ -0,0 +1,78 @@
+ from matplotlib import pyplot as plt
+ from utils import *  # also provides np and pickle (imported inside utils)
+ import warnings
+
+ #### Define outputs ####
+ write_file = True
+ plot_profile = True
+ #### Define outputs ####
+
+ with open('numpy_networks/mlp_[256, 256, 256, 256].pkl', 'rb') as file:
+     mlp = pickle.load(file)
+
+ f_nn = "my_simulation_parameters.txt"
+ with open(f_nn) as fw:
+     lines = fw.readlines()
+
+ for line in lines:
+     l = line.rstrip()
+     if "r_list" in l:
+         if not l[-1] == ",":
+             raise Exception("Ensure there is a comma after last parameter value in " + f_nn)
+         r_list = [float(p) for p in l.split("=")[1].split(",")[:-1]]
+     elif "t_list" in l:
+         if not l[-1] == ",":
+             raise Exception("Ensure there is a comma after last parameter value in " + f_nn)
+         t_list = [float(p) for p in l.split("=")[1].split(",")[:-1]]
+     elif "v_list" in l:
+         if not l[-1] == ",":
+             raise Exception("Ensure there is a comma after last parameter value in " + f_nn)
+         v_list = [float(p) for p in l.split("=")[1].split(",")[:-1]]
+
+ # the parentheses matter here: `not` binds tighter than `and`
+ if not (len(r_list) == len(v_list) and len(r_list) == len(t_list)):
+     raise Exception("Ensure equal number of values for all parameters in " + f_nn)
+
+ for i in range(len(r_list)):
+     if r_list[i] < 0 or r_list[i] > 9.5:
+         warnings.warn('RaQ/Ra is outside the range of the training dataset')
+     if t_list[i] < 1e+6 or t_list[i] > 5e+9:
+         warnings.warn('FKT is outside the range of the training dataset')
+     if v_list[i] < 1 or v_list[i] > 95:
+         warnings.warn('FKV is outside the range of the training dataset')
+
+ ### calculates y points ###
+ num_points = 128
+ y_prof = np.linspace(0, 1, num_points)[::-1]
+ ### calculates y points ###
+
+
+ ### calculates temperature profile ###
+ x_in = get_input(r_list, t_list, v_list, y_prof)
+ y_pred_nn_pointwise = get_profile(x_in, mlp, num_sims=len(r_list))
+ ### calculates temperature profile ###
+
+
+ ### writes out temperature profile ###
+ if write_file:
+     for i in range(len(r_list)):
+         fname = "outputs/profile_raq_ra" + str(r_list[i]) + "_fkt" + str(t_list[i]) + "_fkv" + str(v_list[i])
+         with open(fname + ".txt", "w") as f:
+             for j in range(len(y_prof)):
+                 f.write(str(y_prof[j]) + " " + str(y_pred_nn_pointwise[i, j]) + "\n")
+ ### writes out temperature profile ###
+
+
+ ### plots temperature profile ###
+ if plot_profile:
+     for i in range(len(r_list)):
+         fname = "outputs/profile_raq_ra" + str(r_list[i]) + "_fkt" + str(t_list[i]) + "_fkv" + str(v_list[i])
+         plt.figure()
+         plt.plot(y_pred_nn_pointwise[i, :], y_prof, 'k-', linewidth=3.0, label="pointwise neural network")
+         plt.ylim([1, 0])
+         plt.xlabel("Temperature")
+         plt.ylabel("Depth")
+         plt.legend()
+         plt.grid()
+         plt.savefig(fname + ".png")
+ ### plots temperature profile ###
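
The script reads my_simulation_parameters.txt from the working directory and saves results under outputs/, so run it from the repository root (`python calculate_profiles.py`) with the outputs/ directory already present; neither open() nor plt.savefig() creates missing directories.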
inputs/.ipynb_checkpoints/simulations-checkpoint.txt ADDED
@@ -0,0 +1,133 @@
+ ---------- ------- ---------- ----------- ------------
+ Simulation Dataset RaQ/Ra FKV FKT
+ 0 train 4.21479129 3.01635241 86422511.6
+ 1 test 9.51640694 94.18089723 4821329.69
+ 2 cv 6.271087 42.76214789 4941931.78
+ 3 train 0.44977861 10.12215385 94829681.7
+ 4 train 8.36560001 35.73834584 1455479.5
+ 5 train 5.67467946 1.64110011 6367690.4
+ 6 train 0.70579799 5.84651349 1187881210.0
+ 7 train 7.52290158 53.86099519 12388219.2
+ 8 train 0.12624371 24.95454721 1318547.97
+ 9 train 2.2359818 1.05720485 199740515.0
+ 10 train 3.02242373 11.01921145 1640799310.0
+ 11 train 6.26027711 4.53218755 366333588.0
+ 12 train 4.62259524 11.02248583 427407621.0
+ 13 train 1.43277485 24.85716255 12376736.0
+ 14 train 7.5645277 4.35148281 401681704.0
+ 15 test 6.70361149 73.08858228 6558027490.0
+ 16 train 0.40661271 52.21228433 90006820.6
+ 17 cv 8.63392765 2.64317264 2187883890.0
+ 18 train 3.41552419 53.31843207 73876163.5
+ 19 train 8.36636851 1.2217876 20837722.4
+ 20 train 3.12477618 1.09223116 804157796.0
+ 21 train 6.15233896 45.71924975 106800064.0
+ 22 train 2.68648417 25.01160266 452572724.0
+ 23 train 5.62854856 16.83603314 55118347.0
+ 24 train 1.99522057 50.38927796 11793606.7
+ 25 train 8.43504053 1.74586381 72241124.0
+ 26 train 0.15682868 3.01303633 48187633.5
+ 27 train 5.23937284 18.24324448 232584444.0
+ 28 train 6.31543445 10.52630296 14606584.6
+ 29 train 1.73038798 2.83393888 16181068.4
+ 30 train 8.68359761 16.95695456 378170888.0
+ 31 train 3.57184556 11.05039248 1719860.91
+ 32 cv 5.76158491 4.53324069 2126761.12
+ 33 train 1.70166218 38.34916682 228426072.0
+ 34 train 3.30432282 2.89257812 17507664.5
+ 35 train 4.5068189 17.86232579 27437525.3
+ 36 train 6.23026526 2.72393805 7692784.95
+ 37 train 5.51926251 34.72353772 81760879.4
+ 38 cv 5.61895019 13.79164313 967450922.0
+ 39 cv 1.09103611 91.31624922 1517951.65
+ 40 cv 3.54175058 39.71096834 1008157.58
+ 41 train 1.92594717 37.01010991 9772036.92
+ 42 test 9.70723344 56.28276492 1015330.41
+ 43 train 3.08975856 4.57055172 17770265.0
+ 44 train 1.38024964 22.65176318 10395536.1
+ 45 train 3.75282062 19.97972718 104595781.0
+ 46 train 1.22269777 14.27630401 1679205170.0
+ 47 train 2.80488044 2.29915349 3593013.29
+ 48 train 5.13189578 13.2442949 136549073.0
+ 49 train 4.9460591 10.7995829 24287525.2
+ 50 train 4.99909724 22.46165834 52737683.8
+ 51 train 7.31775345 7.73718569 1020606.79
+ 52 train 7.06258578 86.12572482 2224833.68
+ 53 train 0.6899426 59.64510385 1932628.07
+ 54 train 3.386145 3.55017896 21014570.1
+ 55 test 7.38284445 11.66799278 7538933640.0
+ 56 train 4.08423546 1.74922806 725904067.0
+ 57 cv 2.67362452 26.28214014 51821221.6
+ 58 test 2.24712439 98.26613159 129667315.0
+ 59 cv 4.44838147 4.03057782 2805039520.0
+ 60 cv 2.80942173 10.34159997 284317229.0
+ 61 train 7.79341449 2.02389494 698171693.0
+ 62 train 2.14158405 1.09767463 214437223.0
+ 63 train 1.00672028 1.77203701 2146927190.0
+ 64 train 2.75896303 1.01216577 1629808.7
+ 65 train 2.31856541 16.92198257 3258621.07
+ 66 train 0.51313958 76.60516337 16316079.4
+ 67 train 6.91868053 2.29281923 2581835.05
+ 68 test 3.19785433 38.51330949 7741416430.0
+ 69 test 9.70176645 66.88205995 6742721.02
+ 70 train 4.5807426 19.59110792 4389885.79
+ 71 train 0.74078406 2.52622848 511362006.0
+ 72 train 5.08755399 7.40943107 19058749.0
+ 73 train 4.99394988 1.27436515 6085204.83
+ 74 train 0.45521012 20.65980997 40770664.9
+ 75 train 3.97347544 10.76777553 1610519.78
+ 76 cv 6.9980184 85.15026443 85457763.4
+ 77 test 9.68233821 2.24352405 9101478.03
+ 78 train 3.5983924 59.08511032 513989594.0
+ 79 train 1.71516547 5.28981365 2021804.21
+ 80 train 8.90619866 92.87866715 842798515.0
+ 81 train 2.15518657 34.53441385 142408852.0
+ 82 train 4.18290969 40.8230102 5667779.62
+ 83 cv 4.40076342 2.60998341 86553821.3
+ 84 train 7.923791 2.00903448 7847389.6
+ 85 test 9.49745122 96.2027838 64145832.8
+ 86 test 3.00577213 1.77607573 6848799970.0
+ 87 test 6.43144786 2.80081331 5133341820.0
+ 88 train 3.23268188 7.33454372 683657849.0
+ 89 train 1.45449431 66.14228054 12661236.2
+ 90 train 7.55357502 47.56032424 840358151.0
+ 91 train 0.8745516 19.03683004 1348280.14
+ 92 cv 2.48944593 64.96293189 1453255.63
+ 93 test 3.29462875 23.07376101 7301730590.0
+ 94 test 3.08732987 98.35555512 14813542.7
+ 95 cv 2.27394372 55.85930328 3949869450.0
+ 96 train 2.16161172 11.81239082 465455593.0
+ 97 train 7.00771735 4.13900522 1880711.48
+ 98 cv 0.526931 16.34974624 24964307.0
+ 99 train 1.07206555 2.98839101 211956460.0
+ 100 train 6.79733173 2.58574662 475523342.0
+ 101 train 5.10030487 4.03819673 1664087320.0
+ 102 train 7.46133626 5.88239496 109443847.0
+ 103 train 1.31598953 29.4046416 20509114.1
+ 104 train 7.67114992 1.7810773 3772943.1
+ 105 cv 5.79324631 6.49745687 2011480.87
+ 106 train 6.94242801 1.06855549 8393763.9
+ 107 train 7.87136172 10.88264954 169155714.0
+ 108 train 0.53522819 83.44910045 3944779.68
+ 109 train 4.35426606 4.52555225 100728730.0
+ 110 train 0.32364009 9.14243349 322819020.0
+ 111 train 3.66563052 10.08158087 168383022.0
+ 112 test 7.87992683 5.70103793 5860425410.0
+ 113 train 0.26710223 3.57980514 3636478.69
+ 114 train 2.22284414 1.08850972 10277545.5
+ 115 train 0.97760298 1.86846107 103097759.0
+ 116 train 2.38140612 32.34044506 3319135.1
+ 117 train 9.32155776 9.07025187 231431645.0
+ 118 test 9.98475203 20.79358182 5235741.91
+ 119 train 8.75081696 28.82373853 1686046950.0
+ 120 train 0.80523448 6.37660281 3296282.95
+ 121 train 5.3432885 73.14491846 2298401.12
+ 122 cv 9.17743012 7.49928359 179784166.0
+ 123 train 3.00500403 16.42289134 4046947.23
+ 124 train 2.45033082 13.06101342 3027412.85
+ 125 train 5.47610781 70.54556374 1311680.04
+ 126 train 7.10811033 2.59864226 4812900.22
+ 127 test 9.46423039 1.90577479 7513000020.0
+ 128 train 2.7278834 5.33062584 495559360.0
+ 129 train 7.66466625 3.97436225 8300665.59
+ ---------- ------- ---------- ----------- ------------
inputs/simulations.txt ADDED
@@ -0,0 +1,133 @@
+ ---------- ------- ---------- ------------ -----------
+ Simulation Dataset RaQ/Ra FKT FKV
+ 0 train 4.21479129 86422511.6 3.01635241
+ 1 test 9.51640694 4821329.69 94.18089723
+ 2 cv 6.271087 4941931.78 42.76214789
+ 3 train 0.44977861 94829681.7 10.12215385
+ 4 train 8.36560001 1455479.5 35.73834584
+ 5 train 5.67467946 6367690.4 1.64110011
+ 6 train 0.70579799 1187881210.0 5.84651349
+ 7 train 7.52290158 12388219.2 53.86099519
+ 8 train 0.12624371 1318547.97 24.95454721
+ 9 train 2.2359818 199740515.0 1.05720485
+ 10 train 3.02242373 1640799310.0 11.01921145
+ 11 train 6.26027711 366333588.0 4.53218755
+ 12 train 4.62259524 427407621.0 11.02248583
+ 13 train 1.43277485 12376736.0 24.85716255
+ 14 train 7.5645277 401681704.0 4.35148281
+ 15 test 6.70361149 6558027490.0 73.08858228
+ 16 train 0.40661271 90006820.6 52.21228433
+ 17 cv 8.63392765 2187883890.0 2.64317264
+ 18 train 3.41552419 73876163.5 53.31843207
+ 19 train 8.36636851 20837722.4 1.2217876
+ 20 train 3.12477618 804157796.0 1.09223116
+ 21 train 6.15233896 106800064.0 45.71924975
+ 22 train 2.68648417 452572724.0 25.01160266
+ 23 train 5.62854856 55118347.0 16.83603314
+ 24 train 1.99522057 11793606.7 50.38927796
+ 25 train 8.43504053 72241124.0 1.74586381
+ 26 train 0.15682868 48187633.5 3.01303633
+ 27 train 5.23937284 232584444.0 18.24324448
+ 28 train 6.31543445 14606584.6 10.52630296
+ 29 train 1.73038798 16181068.4 2.83393888
+ 30 train 8.68359761 378170888.0 16.95695456
+ 31 train 3.57184556 1719860.91 11.05039248
+ 32 cv 5.76158491 2126761.12 4.53324069
+ 33 train 1.70166218 228426072.0 38.34916682
+ 34 train 3.30432282 17507664.5 2.89257812
+ 35 train 4.5068189 27437525.3 17.86232579
+ 36 train 6.23026526 7692784.95 2.72393805
+ 37 train 5.51926251 81760879.4 34.72353772
+ 38 cv 5.61895019 967450922.0 13.79164313
+ 39 cv 1.09103611 1517951.65 91.31624922
+ 40 cv 3.54175058 1008157.58 39.71096834
+ 41 train 1.92594717 9772036.92 37.01010991
+ 42 test 9.70723344 1015330.41 56.28276492
+ 43 train 3.08975856 17770265.0 4.57055172
+ 44 train 1.38024964 10395536.1 22.65176318
+ 45 train 3.75282062 104595781.0 19.97972718
+ 46 train 1.22269777 1679205170.0 14.27630401
+ 47 train 2.80488044 3593013.29 2.29915349
+ 48 train 5.13189578 136549073.0 13.2442949
+ 49 train 4.9460591 24287525.2 10.7995829
+ 50 train 4.99909724 52737683.8 22.46165834
+ 51 train 7.31775345 1020606.79 7.73718569
+ 52 train 7.06258578 2224833.68 86.12572482
+ 53 train 0.6899426 1932628.07 59.64510385
+ 54 train 3.386145 21014570.1 3.55017896
+ 55 test 7.38284445 7538933640.0 11.66799278
+ 56 train 4.08423546 725904067.0 1.74922806
+ 57 cv 2.67362452 51821221.6 26.28214014
+ 58 test 2.24712439 129667315.0 98.26613159
+ 59 cv 4.44838147 2805039520.0 4.03057782
+ 60 cv 2.80942173 284317229.0 10.34159997
+ 61 train 7.79341449 698171693.0 2.02389494
+ 62 train 2.14158405 214437223.0 1.09767463
+ 63 train 1.00672028 2146927190.0 1.77203701
+ 64 train 2.75896303 1629808.7 1.01216577
+ 65 train 2.31856541 3258621.07 16.92198257
+ 66 train 0.51313958 16316079.4 76.60516337
+ 67 train 6.91868053 2581835.05 2.29281923
+ 68 test 3.19785433 7741416430.0 38.51330949
+ 69 test 9.70176645 6742721.02 66.88205995
+ 70 train 4.5807426 4389885.79 19.59110792
+ 71 train 0.74078406 511362006.0 2.52622848
+ 72 train 5.08755399 19058749.0 7.40943107
+ 73 train 4.99394988 6085204.83 1.27436515
+ 74 train 0.45521012 40770664.9 20.65980997
+ 75 train 3.97347544 1610519.78 10.76777553
+ 76 cv 6.9980184 85457763.4 85.15026443
+ 77 test 9.68233821 9101478.03 2.24352405
+ 78 train 3.5983924 513989594.0 59.08511032
+ 79 train 1.71516547 2021804.21 5.28981365
+ 80 train 8.90619866 842798515.0 92.87866715
+ 81 train 2.15518657 142408852.0 34.53441385
+ 82 train 4.18290969 5667779.62 40.8230102
+ 83 cv 4.40076342 86553821.3 2.60998341
+ 84 train 7.923791 7847389.6 2.00903448
+ 85 test 9.49745122 64145832.8 96.2027838
+ 86 test 3.00577213 6848799970.0 1.77607573
+ 87 test 6.43144786 5133341820.0 2.80081331
+ 88 train 3.23268188 683657849.0 7.33454372
+ 89 train 1.45449431 12661236.2 66.14228054
+ 90 train 7.55357502 840358151.0 47.56032424
+ 91 train 0.8745516 1348280.14 19.03683004
+ 92 cv 2.48944593 1453255.63 64.96293189
+ 93 test 3.29462875 7301730590.0 23.07376101
+ 94 test 3.08732987 14813542.7 98.35555512
+ 95 cv 2.27394372 3949869450.0 55.85930328
+ 96 train 2.16161172 465455593.0 11.81239082
+ 97 train 7.00771735 1880711.48 4.13900522
+ 98 cv 0.526931 24964307.0 16.34974624
+ 99 train 1.07206555 211956460.0 2.98839101
+ 100 train 6.79733173 475523342.0 2.58574662
+ 101 train 5.10030487 1664087320.0 4.03819673
+ 102 train 7.46133626 109443847.0 5.88239496
+ 103 train 1.31598953 20509114.1 29.4046416
+ 104 train 7.67114992 3772943.1 1.7810773
+ 105 cv 5.79324631 2011480.87 6.49745687
+ 106 train 6.94242801 8393763.9 1.06855549
+ 107 train 7.87136172 169155714.0 10.88264954
+ 108 train 0.53522819 3944779.68 83.44910045
+ 109 train 4.35426606 100728730.0 4.52555225
+ 110 train 0.32364009 322819020.0 9.14243349
+ 111 train 3.66563052 168383022.0 10.08158087
+ 112 test 7.87992683 5860425410.0 5.70103793
+ 113 train 0.26710223 3636478.69 3.57980514
+ 114 train 2.22284414 10277545.5 1.08850972
+ 115 train 0.97760298 103097759.0 1.86846107
+ 116 train 2.38140612 3319135.1 32.34044506
+ 117 train 9.32155776 231431645.0 9.07025187
+ 118 test 9.98475203 5235741.91 20.79358182
+ 119 train 8.75081696 1686046950.0 28.82373853
+ 120 train 0.80523448 3296282.95 6.37660281
+ 121 train 5.3432885 2298401.12 73.14491846
+ 122 cv 9.17743012 179784166.0 7.49928359
+ 123 train 3.00500403 4046947.23 16.42289134
+ 124 train 2.45033082 3027412.85 13.06101342
+ 125 train 5.47610781 1311680.04 70.54556374
+ 126 train 7.10811033 4812900.22 2.59864226
+ 127 test 9.46423039 7513000020.0 1.90577479
+ 128 train 2.7278834 495559360.0 5.33062584
+ 129 train 7.66466625 8300665.59 3.97436225
+ ---------- ------- ---------- ------------ -----------
numpy_networks/mlp_[256, 256, 256, 256].pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:655be8aae8c72cf27ff9de99522941f5d9b3bb8055c79f3d252b99b2995ff1fb
+ size 1583595
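
Note: this is a Git LFS pointer, not the network weights themselves. After cloning, the actual pickle (about 1.6 MB) is fetched with `git lfs install` followed by `git lfs pull`.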
outputs/.ipynb_checkpoints/profile_raq_ra5.0_fkt100000.0_fkv10.0-checkpoint.txt ADDED
@@ -0,0 +1,128 @@
+ 1.0 1.0
+ 0.9921259842519685 1.0178024589432006
+ 0.984251968503937 1.02419132486311
+ 0.9763779527559056 1.0321625073297056
+ 0.968503937007874 1.0421713204767553
+ 0.9606299212598425 1.0516163062641521
+ 0.952755905511811 1.0614091699716657
+ 0.9448818897637795 1.0715877577625534
+ 0.937007874015748 1.0810735731530021
+ 0.9291338582677166 1.089926726337548
+ 0.9212598425196851 1.0982001155465002
+ 0.9133858267716535 1.1058568867629859
+ 0.905511811023622 1.1129892549680713
+ 0.8976377952755905 1.1198585877833815
+ 0.889763779527559 1.1264196723670623
+ 0.8818897637795275 1.132586150591497
+ 0.8740157480314961 1.1384566274167192
+ 0.8661417322834646 1.1456330495711229
+ 0.8582677165354331 1.1523635015752953
+ 0.8503937007874016 1.1586039897141758
+ 0.84251968503937 1.164203314751497
+ 0.8346456692913385 1.1694048936942336
+ 0.8267716535433071 1.1742405969709162
+ 0.8188976377952756 1.17876629362649
+ 0.8110236220472441 1.18299415999214
+ 0.8031496062992126 1.1869087505503264
+ 0.7952755905511811 1.1905182923886117
+ 0.7874015748031495 1.1938298366132871
+ 0.7795275590551181 1.1968493236227868
+ 0.7716535433070866 1.199608124481236
+ 0.7637795275590551 1.2020144235173167
+ 0.7559055118110236 1.204030577737787
+ 0.7480314960629921 1.2057678448470486
+ 0.7401574803149606 1.2072262272213867
+ 0.7322834645669292 1.2084063445380056
+ 0.7244094488188977 1.2093082547736647
+ 0.7165354330708661 1.2099303424100876
+ 0.7086614173228346 1.2102700794433012
+ 0.7007874015748031 1.2103917008171063
+ 0.6929133858267716 1.210251464896192
+ 0.6850393700787402 1.2098211885199128
+ 0.6771653543307087 1.2089267084950532
+ 0.6692913385826772 1.2076845685485764
+ 0.6614173228346456 1.206129355332681
+ 0.6535433070866141 1.205371680302955
+ 0.6456692913385826 1.2049806974748283
+ 0.6377952755905512 1.2043954210198335
+ 0.6299212598425197 1.2036106204337338
+ 0.6220472440944882 1.2026201242791477
+ 0.6141732283464567 1.201416745705292
+ 0.6062992125984252 1.1999648008867543
+ 0.5984251968503937 1.1978809672582558
+ 0.5905511811023622 1.1958229061601988
+ 0.5826771653543307 1.19378547840869
+ 0.5748031496062992 1.1911429438142505
+ 0.5669291338582677 1.1881662557479544
+ 0.5590551181102362 1.1854410463378466
+ 0.5511811023622047 1.1823048964210143
+ 0.5433070866141733 1.178876982518623
+ 0.5354330708661417 1.1751299194641118
+ 0.5275590551181102 1.1710321065675582
+ 0.5196850393700787 1.1665436813486774
+ 0.5118110236220472 1.1616333192854635
+ 0.5039370078740157 1.1562582478198198
+ 0.49606299212598426 1.1503703986482516
+ 0.4881889763779528 1.1439156304548939
+ 0.48031496062992124 1.1368328201979288
+ 0.47244094488188976 1.129052797880398
+ 0.4645669291338583 1.1205223636539805
+ 0.45669291338582674 1.1117314823669315
+ 0.44881889763779526 1.102807930274322
+ 0.4409448818897638 1.0928255734605004
+ 0.4330708661417323 1.0816474431805874
+ 0.4251968503937008 1.0708782919973383
+ 0.41732283464566927 1.0605856578751718
+ 0.4094488188976378 1.049296390847412
+ 0.4015748031496063 1.0369421605031424
+ 0.39370078740157477 1.0255553010327565
+ 0.3858267716535433 1.0128340315070852
+ 0.3779527559055118 1.0019086718619157
+ 0.3700787401574803 0.9901977470956335
+ 0.36220472440944884 0.9766690777548703
+ 0.3543307086614173 0.9609864831395294
+ 0.3464566929133858 0.9427593339272101
+ 0.33858267716535434 0.9216423392280653
+ 0.3307086614173228 0.9017482646476044
+ 0.3228346456692913 0.8842250257427756
+ 0.31496062992125984 0.8691045183639151
+ 0.30708661417322836 0.8534496543474743
+ 0.2992125984251969 0.8371339522358304
+ 0.29133858267716534 0.8201284700467455
+ 0.28346456692913385 0.806397780911551
+ 0.2755905511811024 0.7931112166023593
+ 0.26771653543307083 0.7794296062844782
+ 0.25984251968503935 0.7653047810449107
+ 0.25196850393700787 0.7506458827692618
+ 0.2440944881889764 0.7354059250507149
+ 0.23622047244094488 0.7207409819202417
+ 0.22834645669291337 0.7064884218371179
+ 0.2204724409448819 0.6914737948523322
+ 0.2125984251968504 0.6742529297594405
+ 0.2047244094488189 0.6543064551474167
+ 0.19685039370078738 0.6327049937642623
+ 0.1889763779527559 0.6106332087469254
+ 0.18110236220472442 0.5903256804560127
+ 0.1732283464566929 0.5679499052457502
+ 0.1653543307086614 0.5440168042704815
+ 0.15748031496062992 0.5181675076528582
+ 0.14960629921259844 0.49097526265094993
+ 0.14173228346456693 0.4690522939899713
+ 0.13385826771653542 0.4467834165720918
+ 0.12598425196850394 0.4241461376614992
+ 0.11811023622047244 0.401149626112448
+ 0.11023622047244094 0.377774600216698
+ 0.10236220472440945 0.35399309283800373
+ 0.09448818897637795 0.3296930391700775
+ 0.08661417322834646 0.304730824684046
+ 0.07874015748031496 0.2792811035876094
+ 0.07086614173228346 0.2532962702159859
+ 0.06299212598425197 0.22674011680482922
+ 0.05511811023622047 0.20052832043844745
+ 0.047244094488188976 0.176698559684727
+ 0.03937007874015748 0.1523331129226084
+ 0.031496062992125984 0.1273659753191407
+ 0.023622047244094488 0.10175475262612885
+ 0.015748031496062992 0.07538511274619461
+ 0.007874015748031496 0.04814103197609307
+ 0.0 0.0
outputs/.ipynb_checkpoints/profile_raq_ra7.5_fkt1000000000.0_fkv25.0-checkpoint.png ADDED
outputs/.ipynb_checkpoints/profile_raq_ra7.5_fkt1000000000.0_fkv25.0-checkpoint.txt ADDED
@@ -0,0 +1,128 @@
+ 1.0 1.0
+ 0.9921259842519685 0.9950688346403253
+ 0.984251968503937 0.9958109333720785
+ 0.9763779527559056 0.9962344373154483
+ 0.968503937007874 0.9963438127157567
+ 0.9606299212598425 0.9961419425701642
+ 0.952755905511811 0.9956301997182383
+ 0.9448818897637795 0.9948085046842855
+ 0.937007874015748 0.9940939520613497
+ 0.9291338582677166 0.9946201323314715
+ 0.9212598425196851 0.9950150370927474
+ 0.9133858267716535 0.9952845790823276
+ 0.905511811023622 0.9954341277024014
+ 0.8976377952755905 0.9954685574336534
+ 0.889763779527559 0.9953230018455609
+ 0.8818897637795275 0.9949449242563548
+ 0.8740157480314961 0.99448569940869
+ 0.8661417322834646 0.9939459441547048
+ 0.8582677165354331 0.9933258014056847
+ 0.8503937007874016 0.9926260108279151
+ 0.84251968503937 0.9918474918186793
+ 0.8346456692913385 0.9909907871995196
+ 0.8267716535433071 0.9906878114843723
+ 0.8188976377952756 0.9906106350029152
+ 0.8110236220472441 0.9904939840805035
+ 0.8031496062992126 0.9903390537912615
+ 0.7952755905511811 0.990146453812062
+ 0.7874015748031495 0.9899171435791735
+ 0.7795275590551181 0.9896528927122547
+ 0.7716535433070866 0.9893547314976728
+ 0.7637795275590551 0.9890612122526262
+ 0.7559055118110236 0.9893196595812495
+ 0.7480314960629921 0.9895564211609327
+ 0.7401574803149606 0.989772442085792
+ 0.7322834645669292 0.9899686248562548
+ 0.7244094488188977 0.9901458299221866
+ 0.7165354330708661 0.9903048761979846
+ 0.7086614173228346 0.990446541534676
+ 0.7007874015748031 0.9905715631213701
+ 0.6929133858267716 0.9906806328494628
+ 0.6850393700787402 0.9907736704034982
+ 0.6771653543307087 0.9908520347002285
+ 0.6692913385826772 0.990916300999504
+ 0.6614173228346456 0.9909670048852377
+ 0.6535433070866141 0.9910046409314675
+ 0.6456692913385826 0.9910506796923337
+ 0.6377952755905512 0.9910935873624599
+ 0.6299212598425197 0.9911256000361585
+ 0.6220472440944882 0.9911424770684899
+ 0.6141732283464567 0.9911429480301582
+ 0.6062992125984252 0.991133661750414
+ 0.5984251968503937 0.9911148755527556
+ 0.5905511811023622 0.9910868001204874
+ 0.5826771653543307 0.9910482981983292
+ 0.5748031496062992 0.9909960481561243
+ 0.5669291338582677 0.9909313899110497
+ 0.5590551181102362 0.9908529573606984
+ 0.5511811023622047 0.990802994004299
+ 0.5433070866141733 0.9907689107593368
+ 0.5354330708661417 0.9907286385148512
+ 0.5275590551181102 0.9906821287132976
+ 0.5196850393700787 0.9906292710827063
+ 0.5118110236220472 0.9905698879237498
+ 0.5039370078740157 0.9905037276030212
+ 0.49606299212598426 0.9903512862004434
+ 0.4881889763779528 0.9901705111090647
+ 0.48031496062992124 0.990394105267406
+ 0.47244094488188976 0.9907094238367197
+ 0.4645669291338583 0.9910166209293753
+ 0.45669291338582674 0.9913148228031922
+ 0.44881889763779526 0.991603010351046
+ 0.4409448818897638 0.9918800018564412
+ 0.4330708661417323 0.9920970234152766
+ 0.4251968503937008 0.9926663730962876
+ 0.41732283464566927 0.9934669371102811
+ 0.4094488188976378 0.9942288729603758
+ 0.4015748031496063 0.9949423232342525
+ 0.39370078740157477 0.9956252947243415
+ 0.3858267716535433 0.9962590339323164
+ 0.3779527559055118 0.9967479593090736
+ 0.3700787401574803 0.9971294513207575
+ 0.36220472440944884 0.9974497813570239
+ 0.3543307086614173 0.9977023709739241
+ 0.3464566929133858 0.9978637868769807
+ 0.33858267716535434 0.9978957472618809
+ 0.3307086614173228 0.9978327829053294
+ 0.3228346456692913 0.9976642055670972
+ 0.31496062992125984 0.9973751632626285
+ 0.30708661417322836 0.9969521187285088
+ 0.2992125984251969 0.9967668271065058
+ 0.29133858267716534 0.996799932369035
+ 0.28346456692913385 0.99663553290747
+ 0.2755905511811024 0.9962471955509634
+ 0.26771653543307083 0.9957151691949947
+ 0.25984251968503935 0.9949376720774421
+ 0.25196850393700787 0.9948204261550649
+ 0.2440944881889764 0.9944991487665473
+ 0.23622047244094488 0.9939093942741223
+ 0.22834645669291337 0.9949612343773557
+ 0.2204724409448819 0.9958097897762018
+ 0.2125984251968504 0.9958570680839077
+ 0.2047244094488189 0.9950326535250745
+ 0.19685039370078738 0.993269657110815
+ 0.1889763779527559 0.9904943987782671
+ 0.18110236220472442 0.9866067651892468
+ 0.1732283464566929 0.9814501496109934
+ 0.1653543307086614 0.9725284162704944
+ 0.15748031496062992 0.960591955881983
+ 0.14960629921259844 0.9459049031998263
+ 0.14173228346456693 0.9274022536864276
+ 0.13385826771653542 0.9055608536874807
+ 0.12598425196850394 0.8794161299393387
+ 0.11811023622047244 0.8483762304671509
+ 0.11023622047244094 0.8119839698697441
+ 0.10236220472440945 0.7693537351714411
+ 0.09448818897637795 0.7193951419727576
+ 0.08661417322834646 0.6644998392322397
+ 0.07874015748031496 0.6072823940873321
+ 0.07086614173228346 0.5458252537723316
+ 0.06299212598425197 0.4825227176208522
+ 0.05511811023622047 0.419475266210097
+ 0.047244094488188976 0.3521553434788696
+ 0.03937007874015748 0.2945246014638161
+ 0.031496062992125984 0.23725826815687479
+ 0.023622047244094488 0.17699945091965216
+ 0.015748031496062992 0.11634573154412606
+ 0.007874015748031496 0.0546717664461345
+ 0.0 0.0
stats/.ipynb_checkpoints/MLP_stats-checkpoint.txt ADDED
@@ -0,0 +1,23 @@
+ ------------ --------- ------- ------- ---------
+ architecture mae train mae cv diff diff + cv
+ [32, 2] 0.01158 0.01249 0.00091 0.01294
+ [64, 2] 0.01077 0.01379 0.00302 0.0153
+ [128, 2] 0.00997 0.01177 0.00179 0.01266
+ [256, 2] 0.01 0.01226 0.00226 0.01339
+ [32, 3] 0.00922 0.01167 0.00245 0.01289
+ [64, 3] 0.00878 0.01149 0.00271 0.01284
+ [128, 3] 0.00834 0.01009 0.00175 0.01097
+ [256, 3] 0.0082 0.00927 0.00108 0.00981
+ [32, 4] 0.00797 0.00915 0.00118 0.00974
+ [64, 4] 0.00652 0.00821 0.00169 0.00905
+ [128, 4] 0.00592 0.0083 0.00237 0.00948
+ [256, 4] 0.00681 0.00794 0.00113 0.0085
+ [32, 5] 0.00877 0.01209 0.00332 0.01375
+ [64, 5] 0.00584 0.0084 0.00256 0.00968
+ [128, 5] 0.00608 0.00857 0.00249 0.00981
+ [256, 5] 0.00682 0.00814 0.00133 0.00881
+ [32, 6] 0.00799 0.01453 0.00654 0.0178
+ [64, 6] 0.0118 0.01206 0.00026 0.01219
+ [128, 6] 0.00607 0.00949 0.00342 0.0112
+ [256, 6] 0.00738 0.01015 0.00277 0.01153
+ ------------ --------- ------- ------- ---------
stats/.ipynb_checkpoints/overall_stats-checkpoint.txt ADDED
@@ -0,0 +1,9 @@
+ ----------------------- ------ ------ ------
+ Mean Absolute Error
+
+ Algorithm train cv test
+ Linear Regression 0.0385 0.0388 0.0676
+ Kernel Ridge Regression 0.0148 0.0147 0.0371
+ Neural Network 0.0071 0.0071 0.0187
+ Nearest neighbor 0.0 0.0282 0.0495
+ ----------------------- ------ ------ ------
stats/.ipynb_checkpoints/profiles_cv-checkpoint.pdf ADDED
Binary file (38.8 kB).
stats/.ipynb_checkpoints/profiles_test-checkpoint.pdf ADDED
Binary file (39.5 kB).
stats/MLP_stats.txt ADDED
@@ -0,0 +1,23 @@
+ ------------ --------- ------- ------- ---------
+ architecture mae train mae cv diff diff + cv
+ [32, 2] 0.01158 0.01249 0.00091 0.01294
+ [64, 2] 0.01077 0.01379 0.00302 0.0153
+ [128, 2] 0.00997 0.01177 0.00179 0.01266
+ [256, 2] 0.01 0.01226 0.00226 0.01339
+ [32, 3] 0.00922 0.01167 0.00245 0.01289
+ [64, 3] 0.00878 0.01149 0.00271 0.01284
+ [128, 3] 0.00834 0.01009 0.00175 0.01097
+ [256, 3] 0.0082 0.00927 0.00108 0.00981
+ [32, 4] 0.00797 0.00915 0.00118 0.00974
+ [64, 4] 0.00652 0.00821 0.00169 0.00905
+ [128, 4] 0.00592 0.0083 0.00237 0.00948
+ [256, 4] 0.00681 0.00794 0.00113 0.0085
+ [32, 5] 0.00877 0.01209 0.00332 0.01375
+ [64, 5] 0.00584 0.0084 0.00256 0.00968
+ [128, 5] 0.00608 0.00857 0.00249 0.00981
+ [256, 5] 0.00682 0.00814 0.00133 0.00881
+ [32, 6] 0.00799 0.01453 0.00654 0.0178
+ [64, 6] 0.0118 0.01206 0.00026 0.01219
+ [128, 6] 0.00607 0.00949 0.00342 0.0112
+ [256, 6] 0.00738 0.01015 0.00277 0.01153
+ ------------ --------- ------- ------- ---------
stats/MLP_stats_modes.txt ADDED
@@ -0,0 +1,23 @@
+ ------------ --------- ------- -------- ---------
+ architecture mae train mae cv diff diff + cv
+ [32, 2] 0.08696 0.06601 -0.02095 0.05554
+ [64, 2] 0.08639 0.06642 -0.01996 0.05644
+ [128, 2] 0.09331 0.06741 -0.02589 0.05447
+ [256, 2] 0.08665 0.06687 -0.01978 0.05698
+ [32, 3] 0.09488 0.06788 -0.02699 0.05439
+ [64, 3] 0.07847 0.06889 -0.00958 0.06411
+ [128, 3] 0.07296 0.07038 -0.00258 0.0691
+ [256, 3] 0.07649 0.06953 -0.00696 0.06604
+ [32, 4] 0.08799 0.06886 -0.01913 0.0593
+ [64, 4] 0.08595 0.0706 -0.01535 0.06293
+ [128, 4] 0.0852 0.07315 -0.01205 0.06713
+ [256, 4] 0.09259 0.07169 -0.0209 0.06124
+ [32, 5] 0.08676 0.06701 -0.01975 0.05713
+ [64, 5] 0.07997 0.07123 -0.00874 0.06687
+ [128, 5] 0.07262 0.07217 -0.00045 0.07195
+ [256, 5] 0.08688 0.07142 -0.01546 0.0637
+ [32, 6] 0.08931 0.069 -0.02031 0.05884
+ [64, 6] 0.08923 0.07369 -0.01554 0.06592
+ [128, 6] 0.09521 0.07428 -0.02094 0.06381
+ [256, 6] 0.08294 0.07244 -0.01051 0.06718
+ ------------ --------- ------- -------- ---------
stats/overall_stats.txt ADDED
@@ -0,0 +1,9 @@
+ ----------------------- ------ ------ ------
+ Mean Absolute Error
+
+ Algorithm train cv test
+ Linear Regression 0.0385 0.0388 0.0676
+ Kernel Ridge Regression 0.0148 0.0147 0.0371
+ Neural Network 0.0071 0.0071 0.0187
+ Nearest neighbor 0.0 0.0282 0.0495
+ ----------------------- ------ ------ ------
stats/overall_stats_modes.txt ADDED
@@ -0,0 +1,9 @@
+ ----------------------- ------ ------ ------
+ Mean Absolute Error
+
+ Algorithm train cv test
+ Linear Regression 0.1037 0.1048 0.1418
+ Kernel Ridge Regression 0.0906 0.1138 0.1646
+ Neural Network 0.0916 0.1036 0.1461
+ Nearest neighbor 0.0 0.1304 0.1643
+ ----------------------- ------ ------ ------
utils.py ADDED
@@ -0,0 +1,75 @@
+ import numpy as np
+ import pickle
+
+ def selu(x):
+     # SELU activation with its standard alpha/scale constants
+     alpha = 1.6732632423543772848170429916717
+     scale = 1.0507009873554804934193349852946
+     return scale*( np.maximum(0, x) + np.minimum(alpha*(np.exp(x) - 1), 0) )
+
+ # min-max scaling to [0, 1]: RaQ/Ra linearly, FKT and FKV in log10 space,
+ # using the extrema of the training dataset
+ def non_dimensionalize_raq(x):
+     return (x - 0.12624371)/(9.70723344 - 0.12624371)
+
+ def non_dimensionalize_fkt(x):
+     return (np.log10(x) - 6.00352841978384)/(9.888820429862925 - 6.00352841978384)
+
+ def non_dimensionalize_fkv(x):
+     return (np.log10(x) - 0.005251646002323797)/(1.9927988938926755 - 0.005251646002323797)
+
+ def dimensionalize_raq(x):
+     return x*(9.70723344 - 0.12624371) + 0.12624371
+
+ def dimensionalize_fkt(x):
+     return 10**(x*(9.888820429862925 - 6.00352841978384) + 6.00352841978384)
+
+ def dimensionalize_fkv(x):
+     return 10**(x*(1.9927988938926755 - 0.005251646002323797) + 0.005251646002323797)
+
+ def get_input(raq_ra, fkt, fkv, y_prof):
+     # build the (num_sims*num_points, 4) network input: one row per
+     # (simulation, depth point) pair of non-dimensionalized parameters
+
+     x = np.zeros((len(raq_ra)*len(y_prof), 4))
+
+     cntr = 0
+     for i in range(len(raq_ra)):
+         for j in range(len(y_prof)):
+             x[cntr, 0] = non_dimensionalize_raq(raq_ra[i])
+             x[cntr, 1] = non_dimensionalize_fkt(fkt[i])
+             x[cntr, 2] = non_dimensionalize_fkv(fkv[i])
+             x[cntr, 3] = y_prof[j]
+             cntr += 1
+
+     return x
+
+ def get_profile(inp, mlp, num_sims=1, num_points=128):
+     # forward pass through the pickled MLP: mlp is a list of
+     # (weights, bias) tuples, one per layer
+
+     num_layers = len(mlp) - 1
+     y_pred = inp
+     res = []
+     for l in range(num_layers + 1):
+
+         y_pred = y_pred @ mlp[l][0].T + mlp[l][1]
+
+         # re-concatenate the raw input before the final layer
+         if l in [num_layers - 1]:
+             y_pred = np.concatenate((inp, y_pred), axis=-1)
+
+         # hidden layers: add dense residual connections, then SELU
+         if l != num_layers:
+             for r in res:
+                 y_pred += r
+
+             y_pred = selu(y_pred)
+             res.append(y_pred)
+
+     # pin the boundary temperatures: T = 1 at y = 1 and T = 0 at y = 0
+     y_pred = y_pred.reshape(num_sims, num_points)
+     y_pred[:, 0] = 1.
+     y_pred[:, -1] = 0.
+
+     return y_pred
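
For reference, a minimal sketch of how these helpers fit together outside the Streamlit app, mirroring calculate_profiles.py (the parameter values below are illustrative, and the pickle must first be fetched via Git LFS):

    import pickle
    import numpy as np
    from utils import get_input, get_profile

    # load the pickled list of (weights, bias) layer tuples
    with open('numpy_networks/mlp_[256, 256, 256, 256].pkl', 'rb') as f:
        mlp = pickle.load(f)

    # 128 depth points from y = 1 down to y = 0
    y_prof = np.linspace(0, 1, 128)[::-1]

    # one simulation with RaQ/Ra = 5.0, FKT = 1e+8, FKV = 10.0
    x_in = get_input([5.0], [1e+8], [10.0], y_prof)
    t_prof = get_profile(x_in, mlp, num_sims=1)  # shape (1, 128)
    print(t_prof[0, :5])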