Datasets:
iter
int64 0
7.65k
| sample
stringclasses 1
value | H0_H1
stringclasses 2
values | Hi
stringclasses 27
values | n1
int64 20
1k
| n2
int64 20
1k
| perc
int64 0
50
| real_perc1
float64 0
95
| real_perc2
float64 0
95
| Peto_test
float64 -7.97
10.1
| Gehan_test
float64 -10.22
8.5
| logrank_test
float64 -8.93
6.76
| CoxMantel_test
float64 -9.17
7.03
| BN_GPH_test
float64 0
109
| BN_MCE_test
float64 0
143
| BN_SCE_test
float64 0
114
| Q_test
float64 -8.08
10.1
| MAX_Value_test
float64 0
10.2
| MIN3_test
float64 0
1
| WLg_logrank_test
float64 0
84.2
| WLg_TaroneWare_test
float64 0
99.4
| WLg_Breslow_test
float64 0
106
| WLg_PetoPrentice_test
float64 0
71.9
| WLg_Prentice_test
float64 0
71.6
| WKM_test
float64 -8.89
10.7
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | train | H0 | H01 | 20 | 20 | 0 | 0 | 0 | 0.297551 | -0.297551 | -0.787612 | -0.788933 | 1.840232 | 3.503853 | 1.554255 | -0.788933 | 0.787612 | 0.320263 | 0.622416 | 0.225214 | 0.087665 | 1.168919 | 0.992588 | 0.301342 |
0 | train | H1 | H01 | 20 | 20 | 0 | 0 | 0 | 0.513952 | -0.513952 | -0.310338 | -0.303021 | 0.188742 | 2.101485 | 0.364205 | 0.513952 | 0.513952 | 0.551612 | 0.091822 | 0.142393 | 0.263072 | 0.0032 | 0.022809 | 0.520499 |
0 | train | H0 | H01 | 30 | 30 | 0 | 0 | 0 | 0.177413 | -0.177413 | 0.173177 | 0.170978 | 0.136924 | 3.775261 | 0.503774 | 0.177413 | 0.177413 | 0.286777 | 0.029233 | 0.00478 | 0.031407 | 0.279692 | 0.436632 | 0.17891 |
0 | train | H1 | H01 | 30 | 30 | 0 | 0 | 0 | -0.162629 | 0.162629 | 1.069907 | 1.089918 | 4.989246 | 4.997123 | 4.839961 | 1.089918 | 1.069907 | 0.088923 | 1.187921 | 0.309793 | 0.026362 | 3.937977 | 3.753853 | -0.164001 |
0 | train | H0 | H01 | 50 | 50 | 0 | 0 | 0 | 0.37916 | -0.37916 | -0.914703 | -0.927406 | 2.662669 | 7.818048 | 1.855357 | 0.37916 | 0.914703 | 0.049926 | 0.860082 | 0.2589 | 0.143477 | 1.677242 | 1.474896 | 0.38107 |
0 | train | H1 | H01 | 50 | 50 | 0 | 0 | 0 | 0.565293 | -0.565293 | 0.041903 | 0.041396 | 1.423335 | 1.335342 | 1.535772 | 0.565293 | 0.565293 | 0.463993 | 0.001714 | 0.107755 | 0.318733 | 0.478031 | 0.432948 | 0.568141 |
0 | train | H0 | H01 | 75 | 75 | 0 | 0 | 0 | 0.964114 | -0.964114 | -0.288415 | -0.286881 | 1.69558 | 4.098083 | 2.226717 | 0.964114 | 0.964114 | 0.251066 | 0.082301 | 0.476434 | 0.92987 | 0.228385 | 0.279804 | 0.960873 |
0 | train | H1 | H01 | 75 | 75 | 0 | 0 | 0 | 2.065421 | -2.065421 | -1.58254 | -1.578674 | 3.848608 | 4.701962 | 4.511193 | 2.065421 | 2.065421 | 0.039544 | 2.492212 | 3.456187 | 4.289995 | 0.439017 | 0.423629 | 2.058478 |
0 | train | H0 | H01 | 100 | 100 | 0 | 0 | 0 | 2.159956 | -2.159956 | -2.119579 | -2.133984 | 4.977076 | 5.026849 | 4.934444 | 2.159956 | 2.159956 | 0.030359 | 4.553886 | 4.99028 | 4.689571 | 2.359734 | 2.423254 | 2.165376 |
0 | train | H1 | H01 | 100 | 100 | 0 | 0 | 0 | 1.959598 | -1.959598 | -1.190542 | -1.184431 | 3.973202 | 5.865962 | 4.976259 | 1.959598 | 1.959598 | 0.04947 | 1.402876 | 2.70519 | 3.855503 | 0.012534 | 0.007767 | 1.964515 |
0 | train | H0 | H01 | 150 | 150 | 0 | 0 | 0 | 0.863895 | -0.863895 | -0.845699 | -0.843646 | 0.750277 | 0.890997 | 0.780519 | 0.863895 | 0.863895 | 0.386853 | 0.711738 | 0.717475 | 0.745968 | 0.331242 | 0.381336 | 0.865338 |
0 | train | H1 | H01 | 150 | 150 | 0 | 0 | 0 | 1.212647 | -1.212647 | -1.699598 | -1.713951 | 3.594532 | 5.237844 | 3.313002 | -1.713951 | 1.699598 | 0.155187 | 2.937628 | 1.921175 | 1.470873 | 3.105898 | 2.982316 | 1.214673 |
0 | train | H0 | H01 | 200 | 200 | 0 | 0 | 0 | 1.130483 | -1.130483 | -1.220792 | -1.219776 | 1.5307 | 1.679293 | 1.507572 | -1.219776 | 1.220792 | 0.257677 | 1.487852 | 1.509154 | 1.277909 | 0.933933 | 0.899922 | 1.131899 |
0 | train | H1 | H01 | 200 | 200 | 0 | 0 | 0 | -0.260348 | 0.260348 | 0.503254 | 0.502555 | 0.262266 | 3.890668 | 0.38136 | 0.502555 | 0.503254 | 0.273515 | 0.252561 | 0.250414 | 0.067778 | 0.38694 | 0.477453 | -0.260674 |
0 | train | H0 | H01 | 300 | 300 | 0 | 0 | 0 | -0.998546 | 0.998546 | 1.257883 | 1.258076 | 1.73387 | 2.597221 | 1.620809 | -0.998546 | 1.257883 | 0.317611 | 1.582754 | 1.160427 | 0.996889 | 1.476485 | 1.415125 | -0.999379 |
0 | train | H1 | H01 | 300 | 300 | 0 | 0 | 0 | 1.807274 | -1.807274 | -1.570993 | -1.570808 | 3.260764 | 3.26832 | 3.268844 | -1.570808 | 1.807274 | 0.070485 | 2.467439 | 3.075333 | 3.268852 | 0.831316 | 0.905388 | 1.808782 |
0 | train | H0 | H01 | 500 | 500 | 0 | 0 | 0 | -0.414309 | 0.414309 | 0.867838 | 0.867829 | 1.205183 | 2.010545 | 1.223212 | 0.867829 | 0.867838 | 0.542479 | 0.753128 | 0.341843 | 0.171589 | 1.239592 | 1.309929 | -0.414517 |
0 | train | H1 | H01 | 500 | 500 | 0 | 0 | 0 | 2.961699 | -2.961699 | -2.025371 | -2.029771 | 6.69504 | 12.228367 | 10.026804 | 2.961699 | 2.961699 | 0.003045 | 4.119969 | 6.023191 | 8.795104 | 0.354162 | 0.330033 | 2.963181 |
0 | train | H0 | H01 | 1,000 | 1,000 | 0 | 0 | 0 | -0.220008 | 0.220008 | 0.019275 | 0.01927 | 0.502928 | 1.530797 | 0.167129 | 0.01927 | 0.220008 | 0.675181 | 0.000371 | 0.054873 | 0.048394 | 0.032387 | 0.025087 | -0.220063 |
0 | train | H1 | H01 | 1,000 | 1,000 | 0 | 0 | 0 | 2.599284 | -2.599284 | -0.745395 | -0.74505 | 10.087342 | 15.935188 | 15.911598 | 2.599284 | 2.599284 | 0.000351 | 0.5551 | 3.052244 | 6.766085 | 1.563348 | 1.617375 | 2.599934 |
0 | train | H0 | H01 | 20 | 20 | 10 | 5 | 5 | -0.803648 | 0.834155 | -0.227265 | -0.227779 | 5.844423 | 5.767766 | 5.747528 | -0.803648 | 0.834155 | 0.056486 | 0.051883 | 0.165704 | 0.696034 | 2.296707 | 2.096198 | -0.833031 |
0 | train | H1 | H01 | 20 | 20 | 10 | 15 | 10 | -0.854567 | 0.86663 | 0.478485 | 0.468217 | 1.553334 | 2.389599 | 1.313583 | -0.854567 | 0.86663 | 0.381338 | 0.219227 | 0.580792 | 0.746895 | 0.013202 | 0.000593 | -0.875433 |
0 | train | H0 | H01 | 30 | 30 | 10 | 3.333333 | 20 | 1.385106 | -1.393483 | -0.951161 | -0.950325 | 2.468493 | 2.847636 | 2.436157 | 1.385106 | 1.393483 | 0.154684 | 0.903118 | 1.621126 | 1.940468 | 0.045906 | 0.058 | 1.42318 |
0 | train | H1 | H01 | 30 | 30 | 10 | 16.666667 | 6.666667 | -0.344814 | 0.300143 | 0.949477 | 0.949473 | 2.112193 | 2.178882 | 2.386384 | -0.344814 | 0.949477 | 0.303252 | 0.901499 | 0.394413 | 0.09038 | 2.244038 | 2.225634 | -0.32168 |
0 | train | H0 | H01 | 50 | 50 | 10 | 16 | 6 | 0.072932 | -0.141537 | 0.755727 | 0.749626 | 3.769998 | 4.149782 | 3.480493 | 0.072932 | 0.755727 | 0.175477 | 0.561939 | 0.03036 | 0.019989 | 2.400511 | 2.144201 | 0.134531 |
0 | train | H1 | H01 | 50 | 50 | 10 | 22 | 16 | 0.720911 | -0.767959 | -0.490603 | -0.489079 | 0.541709 | 2.622597 | 0.70775 | 0.720911 | 0.767959 | 0.43766 | 0.239198 | 0.393311 | 0.59045 | 0.000039 | 0.000625 | 0.77615 |
0 | train | H0 | H01 | 75 | 75 | 10 | 5.333333 | 8 | -1.274065 | 1.270591 | 1.112131 | 1.11132 | 1.394227 | 3.394136 | 1.628833 | -1.274065 | 1.270591 | 0.203373 | 1.235031 | 1.377534 | 1.61594 | 0.445386 | 0.38195 | -1.272001 |
0 | train | H1 | H01 | 75 | 75 | 10 | 10.666667 | 6.666667 | -1.105811 | 1.088252 | 1.498018 | 1.500409 | 2.384104 | 3.118999 | 2.543874 | 1.500409 | 1.498018 | 0.274239 | 2.251227 | 1.813484 | 1.186464 | 2.558084 | 2.667369 | -1.093352 |
0 | train | H0 | H01 | 100 | 100 | 10 | 6 | 6 | -0.060556 | 0.059249 | 0.156148 | 0.15558 | 0.045673 | 0.477786 | 0.052446 | 0.15558 | 0.156148 | 0.923744 | 0.024205 | 0.01609 | 0.003505 | 0.058768 | 0.038843 | -0.05978 |
0 | train | H1 | H01 | 100 | 100 | 10 | 7 | 12 | 1.219882 | -1.191099 | -1.602709 | -1.615315 | 3.212383 | 5.182512 | 2.923262 | -1.615315 | 1.602709 | 0.15891 | 2.609242 | 1.738051 | 1.417418 | 2.640368 | 2.519638 | 1.202653 |
0 | train | H0 | H01 | 150 | 150 | 10 | 6 | 14 | 1.532448 | -1.54283 | -1.253053 | -1.25673 | 2.567139 | 2.653406 | 2.422411 | -1.25673 | 1.54283 | 0.118444 | 1.57937 | 2.258574 | 2.381107 | 0.380908 | 0.409196 | 1.561336 |
0 | train | H1 | H01 | 150 | 150 | 10 | 10 | 8 | 0.874087 | -0.905215 | -0.302112 | -0.302143 | 1.275032 | 4.605504 | 1.867345 | 0.874087 | 0.905215 | 0.20307 | 0.091291 | 0.380581 | 0.819865 | 0.187716 | 0.216119 | 0.912835 |
0 | train | H0 | H01 | 200 | 200 | 10 | 14 | 6.5 | -1.856988 | 1.882828 | 1.550296 | 1.544774 | 3.07508 | 4.621068 | 3.512504 | -1.856988 | 1.882828 | 0.057796 | 2.386327 | 3.05195 | 3.549483 | 0.586896 | 0.549903 | -1.897242 |
0 | train | H1 | H01 | 200 | 200 | 10 | 12 | 7 | 0.04899 | -0.085601 | 0.818681 | 0.817514 | 2.842662 | 3.76148 | 3.729655 | 0.04899 | 0.818681 | 0.154923 | 0.66833 | 0.132406 | 0.007332 | 2.580764 | 2.584691 | 0.084427 |
0 | train | H0 | H01 | 300 | 300 | 10 | 11 | 11.666667 | -1.198386 | 1.21379 | 0.832049 | 0.830891 | 1.83572 | 1.918953 | 1.739714 | -1.198386 | 1.21379 | 0.221637 | 0.69038 | 1.271525 | 1.47333 | 0.035625 | 0.039922 | -1.222187 |
0 | train | H1 | H01 | 300 | 300 | 10 | 11 | 14.666667 | 3.669831 | -3.714601 | -2.825099 | -2.833801 | 12.522063 | 20.144645 | 14.728514 | 3.669831 | 3.714601 | 0.000158 | 8.030431 | 11.177037 | 13.878206 | 1.432732 | 1.342919 | 3.747663 |
0 | train | H0 | H01 | 500 | 500 | 10 | 10.6 | 10.2 | -0.049533 | 0.091513 | -0.610215 | -0.610002 | 1.855609 | 2.168215 | 2.118598 | -0.610002 | 0.610215 | 0.346699 | 0.372102 | 0.040017 | 0.008373 | 1.410879 | 1.388317 | -0.091702 |
0 | train | H1 | H01 | 500 | 500 | 10 | 11.6 | 9.6 | 1.381327 | -1.444591 | -0.290169 | -0.289811 | 4.238691 | 7.73377 | 6.222161 | 1.381327 | 1.444591 | 0.044553 | 0.083991 | 0.822611 | 2.088389 | 0.990656 | 1.046019 | 1.456193 |
0 | train | H0 | H01 | 1,000 | 1,000 | 10 | 9.6 | 11.4 | 1.250728 | -1.267598 | -0.815954 | -0.815618 | 1.958135 | 1.92318 | 2.011145 | 1.250728 | 1.267598 | 0.201757 | 0.665234 | 1.266142 | 1.606811 | 0.011333 | 0.011251 | 1.276562 |
0 | train | H1 | H01 | 1,000 | 1,000 | 10 | 10.3 | 10.8 | 3.757314 | -3.85226 | -1.746078 | -1.746665 | 21.735629 | 24.390165 | 26.856193 | 3.757314 | 3.85226 | 0.000001 | 3.050838 | 8.849246 | 14.874501 | 0.759059 | 0.74599 | 3.88065 |
0 | train | H0 | H01 | 20 | 20 | 20 | 10 | 20 | 0.824175 | -0.792447 | -0.832959 | -0.821327 | 0.845007 | 3.965367 | 0.704754 | 0.824175 | 0.832959 | 0.265228 | 0.674578 | 0.821805 | 0.626073 | 0.299302 | 0.518658 | 0.818786 |
0 | train | H1 | H01 | 20 | 20 | 20 | 20 | 25 | 1.77373 | -1.789438 | -1.307196 | -1.318637 | 4.173654 | 4.262932 | 4.287604 | 1.77373 | 1.789438 | 0.070336 | 1.738805 | 2.609862 | 3.25083 | 0.09821 | 0.126405 | 1.809738 |
0 | train | H0 | H01 | 30 | 30 | 20 | 13.333333 | 13.333333 | -1.045294 | 1.062471 | 1.071277 | 1.0787 | 1.167827 | 2.348465 | 1.186867 | -1.045294 | 1.071277 | 0.28005 | 1.163593 | 1.162276 | 1.133013 | 0.845891 | 0.813085 | -1.080206 |
0 | train | H1 | H01 | 30 | 30 | 20 | 33.333333 | 16.666667 | 0.769564 | -0.792499 | -0.504345 | -0.512545 | 0.820746 | 3.716933 | 1.016548 | 0.769564 | 0.792499 | 0.293697 | 0.262702 | 0.404436 | 0.631524 | 0.01199 | 0.02454 | 0.815216 |
0 | train | H0 | H01 | 50 | 50 | 20 | 18 | 20 | 0.331941 | -0.356299 | -0.089421 | -0.088673 | 0.399706 | 0.362346 | 0.421626 | -0.088673 | 0.356299 | 0.721619 | 0.007863 | 0.064885 | 0.126574 | 0.100394 | 0.08158 | 0.356296 |
0 | train | H1 | H01 | 50 | 50 | 20 | 16 | 18 | 0.432225 | -0.430155 | -0.29474 | -0.292758 | 0.15231 | 1.230955 | 0.253106 | 0.432225 | 0.430155 | 0.662052 | 0.085707 | 0.106174 | 0.184826 | 0.000198 | 0.000347 | 0.437082 |
0 | train | H0 | H01 | 75 | 75 | 20 | 26.666667 | 25.333333 | 1.480763 | -1.528104 | -1.032901 | -1.035747 | 3.156837 | 3.586061 | 3.305425 | 1.480763 | 1.528104 | 0.122391 | 1.072771 | 1.768524 | 2.344566 | 0.011751 | 0.015472 | 1.544815 |
0 | train | H1 | H01 | 75 | 75 | 20 | 24 | 25.333333 | 0.747404 | -0.657571 | -0.999997 | -1.005913 | 1.262669 | 1.277464 | 1.271278 | -1.005913 | 0.999997 | 0.506275 | 1.01186 | 0.665575 | 0.432454 | 1.148028 | 1.133511 | 0.664649 |
0 | train | H0 | H01 | 100 | 100 | 20 | 22 | 23 | 0.455679 | -0.519537 | 0.069876 | 0.069544 | 1.691867 | 1.851734 | 1.806817 | 0.455679 | 0.519537 | 0.405186 | 0.004836 | 0.078748 | 0.269604 | 0.651526 | 0.643417 | 0.523 |
0 | train | H1 | H01 | 100 | 100 | 20 | 22 | 25 | 0.520612 | -0.546046 | -0.218798 | -0.217406 | 0.414032 | 1.478993 | 0.717062 | 0.520612 | 0.546046 | 0.578733 | 0.047265 | 0.115541 | 0.298177 | 0.079838 | 0.095544 | 0.555237 |
0 | train | H0 | H01 | 150 | 150 | 20 | 20 | 31.333333 | 2.285759 | -2.294352 | -2.094886 | -2.094429 | 5.338086 | 5.42237 | 5.251095 | -2.094429 | 2.294352 | 0.020105 | 4.386632 | 5.293061 | 5.279468 | 1.711253 | 1.794979 | 2.324376 |
0 | train | H1 | H01 | 150 | 150 | 20 | 24.666667 | 21.333333 | -0.237685 | 0.151247 | 0.697763 | 0.697388 | 1.192507 | 1.510661 | 1.571595 | -0.237685 | 0.697763 | 0.455756 | 0.48635 | 0.221963 | 0.022902 | 1.302549 | 1.222048 | -0.156639 |
0 | train | H0 | H01 | 200 | 200 | 20 | 20.5 | 21 | 0.335253 | -0.401659 | 0.25308 | 0.252943 | 2.652354 | 4.403275 | 2.100223 | 0.252943 | 0.401659 | 0.221082 | 0.06398 | 0.051369 | 0.16122 | 0.920294 | 0.864626 | 0.407692 |
0 | train | H1 | H01 | 200 | 200 | 20 | 21 | 20 | 2.749608 | -2.737033 | -2.317499 | -2.346261 | 7.324129 | 8.332189 | 7.845022 | 2.749608 | 2.737033 | 0.005528 | 5.504939 | 6.708565 | 7.528505 | 1.49635 | 1.477055 | 2.774547 |
0 | train | H0 | H01 | 300 | 300 | 20 | 25 | 19.666667 | -1.368661 | 1.349631 | 1.453353 | 1.451429 | 2.114027 | 2.994807 | 2.116314 | -1.368661 | 1.453353 | 0.168605 | 2.106646 | 1.799454 | 1.821571 | 1.454967 | 1.430591 | -1.3767 |
0 | train | H1 | H01 | 300 | 300 | 20 | 21 | 18.333333 | 1.777565 | -1.90831 | -0.651661 | -0.651014 | 7.847443 | 10.190463 | 8.931223 | 1.777565 | 1.90831 | 0.011498 | 0.42382 | 1.9705 | 3.647 | 0.808495 | 0.816092 | 1.935888 |
0 | train | H0 | H01 | 500 | 500 | 20 | 19.4 | 18.2 | -1.844477 | 1.824736 | 1.762045 | 1.765036 | 3.772777 | 5.890243 | 3.444843 | -1.844477 | 1.824736 | 0.064718 | 3.115353 | 3.756669 | 3.332395 | 1.425462 | 1.523659 | -1.847201 |
0 | train | H1 | H01 | 500 | 500 | 20 | 19.8 | 18.8 | 2.338764 | -2.434977 | -1.274999 | -1.276134 | 9.570183 | 9.208714 | 10.040713 | 2.338764 | 2.434977 | 0.006602 | 1.628518 | 4.067596 | 5.941023 | 0.132245 | 0.122116 | 2.465979 |
0 | train | H0 | H01 | 1,000 | 1,000 | 20 | 18.4 | 19.1 | 1.653914 | -1.667306 | -1.27915 | -1.279634 | 3.230276 | 3.80562 | 3.055585 | 1.653914 | 1.667306 | 0.091849 | 1.637462 | 2.483657 | 2.780034 | 0.22619 | 0.247617 | 1.685722 |
0 | train | H1 | H01 | 1,000 | 1,000 | 20 | 23.2 | 20.6 | 2.123324 | -2.31799 | -0.642739 | -0.643066 | 13.041257 | 13.237005 | 15.27561 | 2.123324 | 2.31799 | 0.000482 | 0.413534 | 2.553699 | 5.380721 | 1.977338 | 1.925075 | 2.357471 |
0 | train | H0 | H01 | 20 | 20 | 30 | 30 | 45 | 0.407657 | -0.265446 | -0.755308 | -0.741812 | 1.587526 | 1.814292 | 1.52868 | -0.741812 | 0.755308 | 0.465641 | 0.550286 | 0.200475 | 0.06979 | 1.020614 | 0.900487 | 0.315759 |
0 | train | H1 | H01 | 20 | 20 | 30 | 30 | 35 | 1.098945 | -1.289519 | -0.394569 | -0.390539 | 5.47958 | 5.659539 | 5.289003 | -0.390539 | 1.289519 | 0.071041 | 0.152521 | 0.98001 | 1.665347 | 0.594141 | 0.397952 | 1.275541 |
0 | train | H0 | H01 | 30 | 30 | 30 | 26.666667 | 33.333333 | 1.248024 | -1.363656 | -0.829539 | -0.835212 | 3.125383 | 4.233548 | 2.90171 | 1.248024 | 1.363656 | 0.163342 | 0.697578 | 1.500093 | 1.868149 | 0.00498 | 0.000586 | 1.393919 |
0 | train | H1 | H01 | 30 | 30 | 30 | 26.666667 | 33.333333 | 0.933412 | -0.93641 | -0.896393 | -0.902476 | 0.831838 | 4.386144 | 0.8731 | -0.902476 | 0.93641 | 0.222674 | 0.814464 | 0.757432 | 0.874268 | 0.313215 | 0.251958 | 0.977905 |
0 | train | H0 | H01 | 50 | 50 | 30 | 12 | 24 | -0.008914 | 0.115572 | -0.80164 | -0.797437 | 3.854138 | 3.78784 | 4.04589 | -0.797437 | 0.80164 | 0.132265 | 0.635906 | 0.073524 | 0.013332 | 2.497377 | 2.427664 | -0.106022 |
0 | train | H1 | H01 | 50 | 50 | 30 | 38 | 24 | 0.539551 | -0.683358 | 0.033857 | 0.03362 | 2.365282 | 2.356311 | 2.355011 | 0.539551 | 0.683358 | 0.308046 | 0.00113 | 0.208318 | 0.46743 | 0.854836 | 0.718598 | 0.697272 |
0 | train | H0 | H01 | 75 | 75 | 30 | 30.666667 | 26.666667 | -1.256811 | 1.245083 | 1.118275 | 1.119873 | 1.583557 | 1.606149 | 1.610186 | -1.256811 | 1.245083 | 0.204852 | 1.254117 | 1.470657 | 1.549969 | 0.479571 | 0.505506 | -1.267848 |
0 | train | H1 | H01 | 75 | 75 | 30 | 36 | 32 | 1.487677 | -1.582351 | -1.124759 | -1.147391 | 2.920789 | 3.531843 | 3.037666 | -1.147391 | 1.582351 | 0.101078 | 1.316507 | 1.999613 | 2.506631 | 0.059392 | 0.057723 | 1.639649 |
0 | train | H0 | H01 | 100 | 100 | 30 | 30 | 37 | 0.853882 | -0.775731 | -1.081509 | -1.075251 | 1.373759 | 1.356463 | 1.390693 | -1.075251 | 1.081509 | 0.424033 | 1.156164 | 0.859437 | 0.601166 | 1.217827 | 1.211355 | 0.799445 |
0 | train | H1 | H01 | 100 | 100 | 30 | 36 | 23 | -0.221138 | 0.067954 | 0.705305 | 0.702843 | 1.663794 | 4.630422 | 2.034658 | 0.702843 | 0.705305 | 0.200948 | 0.493988 | 0.159006 | 0.004616 | 1.726401 | 1.759971 | -0.071006 |
0 | train | H0 | H01 | 150 | 150 | 30 | 36.666667 | 28 | -1.074778 | 1.029137 | 1.214698 | 1.210914 | 1.514587 | 1.604294 | 1.503252 | -1.074778 | 1.214698 | 0.289987 | 1.466312 | 1.251469 | 1.058312 | 1.393494 | 1.37973 | -1.058149 |
0 | train | H1 | H01 | 150 | 150 | 30 | 23.333333 | 32.666667 | 1.32551 | -1.182069 | -1.787445 | -1.793351 | 4.419992 | 5.51441 | 4.141883 | -1.793351 | 1.787445 | 0.126067 | 3.216108 | 1.909378 | 1.397556 | 4.108235 | 4.078711 | 1.218822 |
0 | train | H0 | H01 | 200 | 200 | 30 | 25.5 | 25 | 1.090286 | -1.174809 | -0.667883 | -0.667772 | 1.770602 | 2.740936 | 2.047558 | 1.090286 | 1.174809 | 0.232908 | 0.445919 | 0.927274 | 1.381341 | 0.013874 | 0.014541 | 1.1929 |
0 | train | H1 | H01 | 200 | 200 | 30 | 26 | 31.5 | 3.718015 | -3.829487 | -3.012151 | -3.041385 | 14.577457 | 16.512133 | 15.465918 | 3.718015 | 3.829487 | 0.000089 | 9.250024 | 12.549546 | 14.793038 | 1.843675 | 1.88484 | 3.917808 |
0 | train | H0 | H01 | 300 | 300 | 30 | 29.333333 | 30.666667 | -0.728133 | 0.844798 | 0.306988 | 0.306734 | 1.418064 | 3.174054 | 1.681226 | -0.728133 | 0.844798 | 0.36556 | 0.094086 | 0.381593 | 0.713397 | 0.153153 | 0.176771 | -0.860678 |
0 | train | H1 | H01 | 300 | 300 | 30 | 28.666667 | 25.666667 | 3.234561 | -3.382965 | -2.323376 | -2.345086 | 12.370137 | 20.018904 | 13.99755 | 3.234561 | 3.382965 | 0.000168 | 5.499428 | 8.644374 | 11.536311 | 0.304443 | 0.289616 | 3.471008 |
0 | train | H0 | H01 | 500 | 500 | 30 | 30 | 32.2 | 1.081769 | -1.134488 | -0.798476 | -0.797736 | 1.614276 | 1.633271 | 1.58824 | 1.081769 | 1.134488 | 0.247504 | 0.636383 | 1.068313 | 1.286987 | 0.022785 | 0.027696 | 1.156433 |
0 | train | H1 | H01 | 500 | 500 | 30 | 29.6 | 31.6 | 3.821389 | -3.881264 | -3.193785 | -3.221225 | 14.774733 | 19.66381 | 15.942269 | 3.821389 | 3.881264 | 0.000072 | 10.376289 | 12.846617 | 15.127841 | 2.421045 | 2.430861 | 3.968673 |
0 | train | H0 | H01 | 1,000 | 1,000 | 30 | 31 | 31.2 | -0.060423 | 0.057156 | 0.069228 | 0.069181 | 0.012648 | 0.506122 | 0.004926 | -0.060423 | 0.069228 | 0.917544 | 0.004786 | 0.0007 | 0.003266 | 0.007628 | 0.005909 | -0.058534 |
0 | train | H1 | H01 | 1,000 | 1,000 | 30 | 28.4 | 31.1 | 5.96303 | -6.145509 | -4.597427 | -4.628103 | 40.997567 | 42.33373 | 42.921905 | 5.96303 | 6.145509 | 0 | 21.419336 | 31.669924 | 37.946179 | 2.747855 | 2.791906 | 6.291102 |
0 | train | H0 | H01 | 20 | 20 | 40 | 50 | 30 | -1.986289 | 1.954615 | 1.830505 | 1.863453 | 4.188969 | 5.839317 | 4.08458 | -1.986289 | 1.954615 | 0.043957 | 3.472458 | 3.964774 | 3.872941 | 1.249406 | 1.512532 | -2.0145 |
0 | train | H1 | H01 | 20 | 20 | 40 | 40 | 25 | -0.048054 | 0.032411 | -0.042355 | -0.041987 | 0.109168 | 0.907981 | 0.074633 | -0.041987 | 0.042355 | 0.823501 | 0.001763 | 0.002192 | 0.001041 | 0.000118 | 0.004811 | -0.039598 |
0 | train | H0 | H01 | 30 | 30 | 40 | 40 | 43.333333 | 0.243445 | -0.268707 | -0.178829 | -0.176696 | 0.061874 | 1.442812 | 0.091146 | -0.176696 | 0.268707 | 0.695531 | 0.031221 | 0.038376 | 0.071752 | 0.019242 | 0.04028 | 0.299662 |
0 | train | H1 | H01 | 30 | 30 | 40 | 16.666667 | 46.666667 | 2.006477 | -1.782034 | -2.415713 | -2.487463 | 8.468571 | 12.845458 | 8.19277 | -2.487463 | 2.415713 | 0.004983 | 6.18747 | 4.252939 | 3.168865 | 7.838714 | 7.504559 | 1.82909 |
0 | train | H0 | H01 | 50 | 50 | 40 | 36 | 40 | 1.155198 | -1.157645 | -1.015396 | -1.014224 | 1.31519 | 4.014844 | 1.434847 | -1.014224 | 1.157645 | 0.234719 | 1.02865 | 1.117153 | 1.343163 | 0.198164 | 0.164395 | 1.188292 |
0 | train | H1 | H01 | 50 | 50 | 40 | 46 | 40 | 0.736143 | -0.935641 | -0.391806 | -0.393093 | 2.000192 | 1.950276 | 1.999808 | -0.393093 | 0.935641 | 0.33497 | 0.154522 | 0.551979 | 0.881154 | 0.1759 | 0.153559 | 0.964151 |
0 | train | H0 | H01 | 75 | 75 | 40 | 37.333333 | 33.333333 | 0.193052 | -0.282876 | 0.002319 | 0.002312 | 0.294966 | 1.346982 | 0.373713 | 0.193052 | 0.282876 | 0.718008 | 0.000005 | 0.019015 | 0.08007 | 0.149342 | 0.178146 | 0.290936 |
0 | train | H1 | H01 | 75 | 75 | 40 | 36 | 41.333333 | 1.048222 | -0.976854 | -1.15953 | -1.157057 | 1.377593 | 1.725894 | 1.356568 | -1.157057 | 1.15953 | 0.313346 | 1.338781 | 1.038827 | 0.950655 | 0.944314 | 0.901768 | 1.008226 |
0 | train | H0 | H01 | 100 | 100 | 40 | 40 | 35 | -0.272073 | 0.268015 | 0.233807 | 0.232673 | 0.086635 | 0.104832 | 0.08213 | 0.232673 | 0.268015 | 0.783942 | 0.054137 | 0.068741 | 0.07172 | 0.040821 | 0.036374 | -0.274185 |
0 | train | H1 | H01 | 100 | 100 | 40 | 46 | 32 | -0.172026 | 0.142872 | 0.277888 | 0.278845 | 0.119665 | 2.61963 | 0.175513 | 0.278845 | 0.277888 | 0.454059 | 0.077755 | 0.074936 | 0.02043 | 0.222506 | 0.246776 | -0.151409 |
0 | train | H0 | H01 | 150 | 150 | 40 | 48 | 39.333333 | -1.027352 | 0.908253 | 1.140545 | 1.138094 | 1.375021 | 1.659653 | 1.35797 | 1.138094 | 1.140545 | 0.350145 | 1.295258 | 0.992867 | 0.824218 | 1.363349 | 1.339973 | -0.934308 |
0 | train | H1 | H01 | 150 | 150 | 40 | 39.333333 | 43.333333 | 2.473141 | -2.719824 | -1.846737 | -1.865796 | 8.636487 | 14.535853 | 9.332607 | -1.865796 | 2.719824 | 0.002259 | 3.481194 | 5.539445 | 7.422138 | 0.117772 | 0.108651 | 2.827525 |
0 | train | H0 | H01 | 200 | 200 | 40 | 39.5 | 42 | 0.009842 | -0.050845 | 0.005465 | 0.005471 | 0.001151 | 0.620124 | 0.002631 | 0.005471 | 0.050845 | 0.89181 | 0.00003 | 0.000566 | 0.002584 | 0.006486 | 0.008035 | 0.050787 |
0 | train | H1 | H01 | 200 | 200 | 40 | 40.5 | 37.5 | 0.682409 | -0.926684 | -0.08553 | -0.085376 | 3.070526 | 6.073317 | 3.652822 | 0.682409 | 0.926684 | 0.108097 | 0.007289 | 0.287587 | 0.859103 | 0.960259 | 0.993233 | 0.954372 |
0 | train | H0 | H01 | 300 | 300 | 40 | 39.666667 | 39.666667 | -0.591453 | 0.619517 | 0.4215 | 0.420914 | 0.560674 | 0.584424 | 0.587417 | -0.591453 | 0.619517 | 0.524334 | 0.177169 | 0.292318 | 0.38373 | 0.00325 | 0.003551 | -0.636678 |
0 | train | H1 | H01 | 300 | 300 | 40 | 35 | 42.333333 | 4.104037 | -4.138024 | -3.655302 | -3.692567 | 17.227196 | 19.350506 | 17.683843 | 4.104037 | 4.138024 | 0.000021 | 13.635053 | 15.948764 | 17.261125 | 4.462179 | 4.515193 | 4.250005 |
0 | train | H0 | H01 | 500 | 500 | 40 | 40.6 | 39.6 | -0.136348 | 0.063744 | 0.200562 | 0.200433 | 0.110304 | 0.964665 | 0.073539 | -0.136348 | 0.200562 | 0.809801 | 0.040173 | 0.004152 | 0.004063 | 0.081676 | 0.072838 | -0.065329 |
0 | train | H1 | H01 | 500 | 500 | 40 | 39.4 | 38.2 | 2.206897 | -2.259526 | -1.941683 | -1.952218 | 4.614704 | 8.348032 | 5.084353 | -1.952218 | 2.259526 | 0.019046 | 3.811154 | 4.200542 | 5.119551 | 1.065405 | 1.053116 | 2.344626 |
0 | train | H0 | H01 | 1,000 | 1,000 | 40 | 40.3 | 39.1 | -0.388839 | 0.371515 | 0.363059 | 0.363072 | 0.165 | 0.375743 | 0.151665 | -0.388839 | 0.371515 | 0.702623 | 0.131821 | 0.161651 | 0.138004 | 0.061473 | 0.070642 | -0.381783 |
0 | train | H1 | H01 | 1,000 | 1,000 | 40 | 39.3 | 39.1 | 4.100974 | -4.780682 | -2.062354 | -2.065967 | 47.998175 | 55.070417 | 52.218613 | 4.100974 | 4.780682 | 0 | 4.26822 | 13.826907 | 22.921473 | 2.937615 | 2.911476 | 4.915471 |
Machine Learning for Two-Sample Testing under Right-Censored Data: A Simulation Study
- Petr PHILONENKO, Ph.D. in Computer Science;
- Sergey POSTOVALOV, D.Sc. in Computer Science.
The paper can be downloaded here.
About
This dataset is a supplement to the github repository and paper addressed to solving the two-sample problem under right-censored observations using Machine Learning. The problem statement can be formulated as H0: S1(t)=S2(t) versus H1: S1(t)≠S2(t), where S1(t) and S2(t) are the survival functions of samples X1 and X2.
This dataset contains the synthetic data simulated by the Monte Carlo method and Inverse Transform Sampling.
Contents
Citing
@misc {petr_philonenko_2024,
author = { {Petr Philonenko} },
title = { ML_for_TwoSampleTesting (Revision a4ae672) },
year = 2024,
url = { https://huggingface.co/datasets/pfilonenko/ML_for_TwoSampleTesting },
doi = { 10.57967/hf/2978 },
publisher = { Hugging Face }
}
Repository
The files of this dataset have the following structure:
data
├── 1_raw
│ └── two_sample_problem_dataset.tsv.gz (121,986,000 rows)
├── 2_samples
│ ├── sample_train.tsv.gz (24,786,000 rows)
│ └── sample_simulation.tsv.gz (97,200,000 rows)
└── 3_dataset_with_ML_pred
└── dataset_with_ML_pred.tsv.gz (97,200,000 rows)
- two_sample_problem_dataset.tsv.gz contains the raw simulated data. In the github repository, this file must be located in the ML_for_TwoSampleTesting/proposed_ml_for_two_sample_testing/data/1_raw/
- sample_train.tsv.gz and sample_simulation.tsv.gz are the train and test samples split from the two_sample_problem_dataset.tsv.gz. In the github repository, these files must be located in the ML_for_TwoSampleTesting/proposed_ml_for_two_sample_testing/data/2_samples/
- dataset_with_ML_pred.tsv.gz is the test sample supplemented with the predictions of the proposed ML-methods. In the github repository, this file must be located in the ML_for_TwoSampleTesting/proposed_ml_for_two_sample_testing/data/3_dataset_with_ML_pred/
Fields
In these files, there are the following fields:
- PARAMETERS OF SAMPLE SIMULATION
- iter is an iteration number of the Monte Carlo replication (in total, 37650);
- sample is a type of the sample (train, val, test). This field is used to split dataset into train-validate-test samples for ML-model training;
- H0_H1 is a true hypothesis: if H0, then samples X1 and X2 were simulated under S1(t)=S2(t); if H1, then samples X1 and X2 were simulated under S1(t)≠S2(t);
- Hi is an alternative (H01-H09, H11-H19, or H21-H29) with competing hypotheses S1(t) and S2(t). Detailed description of these alternatives can be found in the paper;
- n1 is the size of the sample 1;
- n2 is the size of the sample 2;
- perc is a set (expected) censoring rate for the samples 1 and 2;
- real_perc1 is an actual censoring rate of the sample 1;
- real_perc2 is an actual censoring rate of the sample 2;
- STATISTICS OF CLASSICAL TWO-SAMPLE TESTS
- Peto_test is a statistic of the Peto and Peto’s Generalized Wilcoxon test (which is computed on two samples under parameters described above);
- Gehan_test is a statistic of the Gehan’s Generalized Wilcoxon test;
- logrank_test is a statistic of the logrank test;
- CoxMantel_test is a statistic of the Cox-Mantel test;
- BN_GPH_test is a statistic of the Bagdonavičius-Nikulin test (Generalized PH model);
- BN_MCE_test is a statistic of the Bagdonavičius-Nikulin test (Multiple Crossing-Effect model);
- BN_SCE_test is a statistic of the Bagdonavičius-Nikulin test (Single Crossing-Effect model);
- Q_test is a statistic of the Q-test;
- MAX_Value_test is a statistic of the Maximum Value test;
- MIN3_test is a statistic of the MIN3 test;
- WLg_logrank_test is a statistic of the Weighted Logrank test (weighted function: 'logrank');
- WLg_TaroneWare_test is a statistic of the Weighted Logrank test (weighted function: 'Tarone-Ware');
- WLg_Breslow_test is a statistic of the Weighted Logrank test (weighted function: 'Breslow');
- WLg_PetoPrentice_test is a statistic of the Weighted Logrank test (weighted function: 'Peto-Prentice');
- WLg_Prentice_test is a statistic of the Weighted Logrank test (weighted function: 'Prentice');
- WKM_test is a statistic of the Weighted Kaplan-Meier test;
- STATISTICS OF THE PROPOSED ML-METHODS FOR TWO-SAMPLE PROBLEM
- CatBoost_test is a statistic of the proposed ML-method based on the CatBoost framework;
- XGBoost_test is a statistic of the proposed ML-method based on the XGBoost framework;
- LightAutoML_test is a statistic of the proposed ML-method based on the LightAutoML (LAMA) framework;
- SKLEARN_RF_test is a statistic of the proposed ML-method based on Random Forest (implemented in sklearn);
- SKLEARN_LogReg_test is a statistic of the proposed ML-method based on Logistic Regression (implemented in sklearn);
- SKLEARN_GB_test is a statistic of the proposed ML-method based on Gradient Boosting Machine (implemented in sklearn).
Simulation
For this dataset, the full source code (C++) is available here. It makes it possible to reproduce and extend the simulation by the Monte Carlo method. Here, we present two fragments of the source code (main.cpp and simulation_for_machine_learning.h) that can help to understand the main steps of the simulation process.
main.cpp
#include"simulation_for_machine_learning.h"
// Select two-sample tests
vector<HomogeneityTest*> AllTests()
{
vector<HomogeneityTest*> D;
// ---- Classical Two-Sample tests for Uncensored Case ----
//D.push_back( new HT_AndersonDarlingPetitt );
//D.push_back( new HT_KolmogorovSmirnovTest );
//D.push_back( new HT_LehmannRosenblatt );
// ---- Two-Sample tests for Right-Censored Case ----
D.push_back( new HT_Peto );
D.push_back( new HT_Gehan );
D.push_back( new HT_Logrank );
D.push_back( new HT_BagdonaviciusNikulinGeneralizedCox );
D.push_back( new HT_BagdonaviciusNikulinMultiple );
D.push_back( new HT_BagdonaviciusNikulinSingle );
D.push_back( new HT_QTest ); //Q-test
D.push_back( new HT_MAX ); //Maximum Value test
D.push_back( new HT_SynthesisTest ); //MIN3 test
D.push_back( new HT_WeightedLogrank("logrank") );
D.push_back( new HT_WeightedLogrank("Tarone–Ware") );
D.push_back( new HT_WeightedLogrank("Breslow") );
D.push_back( new HT_WeightedLogrank("Peto–Prentice") );
D.push_back( new HT_WeightedLogrank("Prentice") );
D.push_back( new HT_WeightedKaplanMeyer );
return D;
}
// Example of two-sample testing using this code
// Example of two-sample testing using this code.
// Loads two fixed samples from disk and, for every test in D, prints the
// test name, its statistic Sn and the simulated p-value.
void EXAMPLE_1(vector<HomogeneityTest*> &D)
{
    // load the samples
    Sample T1(".//samples//1Chemotherapy.txt");
    Sample T2(".//samples//2Radiotherapy.txt");
    // two-sample testing through the selected tests
    for(size_t j=0; j<D.size(); j++)
    {
        char test_name[512];
        D[j]->TitleTest(test_name);
        double Sn = D[j]->CalculateStatistic(T1, T2);
        // 27k replications according to the Kolmogorov theorem =>
        // simulation error MAX||G(S|H0)-Gn(S|H0)|| <= 0.01
        double pvalue = D[j]->p_value(T1, T2, 27000);
        // BUG FIX: the original passed &test_name (type char(*)[512]) to %s,
        // which is undefined behavior; the array itself decays to char*.
        printf("%s\n", test_name);
        printf("\t Sn: %lf\n", Sn);
        printf("\t pv: %lf\n", pvalue);
        printf("--------------------------------\n"); // was missing the newline
    }
}
// Example of the dataset simulation for the proposed ML-method
// Example of the dataset simulation for the proposed ML-method.
// Constructing the object launches the full simulation; results are written
// into ".//to_machine_learning_2024//".
void EXAMPLE_2(vector<HomogeneityTest*> &D)
{
    simulation_for_machine_learning simulator(D);
}
// init point
// Entry point: configures OpenMP, runs both examples, then releases the tests.
int main()
{
    // Set the number of threads, keeping one core free for the OS.
    // BUG FIX: on a single-core machine omp_get_max_threads() returns 1 and
    // the original requested 0 threads, which is invalid; clamp to >= 1.
    int k = omp_get_max_threads() - 1;
    if( k < 1 )
        k = 1;
    omp_set_num_threads( k );
    // Select two-sample tests
    auto D = AllTests();
    // Example of two-sample testing using this code
    EXAMPLE_1(D);
    // Example of the dataset simulation for the proposed ML-method
    EXAMPLE_2(D);
    // Freeing memory
    ClearMemory(D);
    printf("The mission is completed.\n");
    return 0;
}
simulation_for_machine_learning.h
#ifndef simulation_for_machine_learning_H
#define simulation_for_machine_learning_H
#include"HelpFucntions.h"
// Object of the data simulation for training of the proposed ML-method
// Object of the data simulation for training of the proposed ML-method.
// Constructing it runs N Monte Carlo replications in parallel (OpenMP); each
// thread appends CSV rows to its own shard file in ".//to_machine_learning_2024//".
class simulation_for_machine_learning{
private:
    // p-value computation using the Test and its statistic Sn.
    // The integrated tail depends on the test type: "right", "left", or
    // two-sided ("double").
    double pvalue(double Sn, HomogeneityTest* Test)
    {
        auto f = Test->F( Sn );
        double pv = 0;
        // BUG FIX: the original compared TestType().c_str() with a string
        // literal via ==, which compares pointers (never equal), so every
        // test silently fell into the two-sided branch. Compare values.
        const auto type = Test->TestType();
        if( type == "right" )
            pv = 1.0 - f;
        else if( type == "left" )
            pv = f;
        else // "double"
            pv = 2.0*min( f, 1-f );
        return pv;
    }
    // One Monte Carlo replication: for every alternative hypothesis x
    // censoring rate x sample size, simulates samples under H0 and H1,
    // computes the statistic of every test in D and appends two CSV rows.
    //   iter - replication number (iter == 0 also writes the CSV header)
    //   D    - battery of two-sample tests
    //   rank - OpenMP thread rank, used to shard the output files
    //   Gw   - random generator; NOTE(review): passed by value, so each call
    //          advances a private copy -- confirm this is intended
    void Simulation(int iter, vector<HomogeneityTest*> &D, int rank, mt19937boost Gw)
    {
        // per-thread output file (snprintf: bounded, unlike the original sprintf)
        char file_to_save[512];
        snprintf(file_to_save, sizeof(file_to_save),
                 ".//to_machine_learning_2024//to_machine_learning[rank=%d].csv", rank);
        // on the first iteration, write the head of the table
        if( iter == 0 )
        {
            FILE *ou = fopen(file_to_save,"w");
            if( ou != NULL )
            {
                fprintf(ou, "num;H0/H1;model;n1;n2;perc;real_perc1;real_perc2;");
                for(size_t i=0; i<D.size(); i++)
                {
                    char title_of_test[512];
                    D[i]->TitleTest(title_of_test);
                    fprintf(ou, "Sn [%s];p-value [%s];", title_of_test, title_of_test);
                }
                fprintf(ou, "\n");
                fclose(ou);
            }
        }
        // Getting the list of the alternative hypotheses (H01 - H27)
        vector<int> H;
        int l = 1; // family of the censoring-time distribution F^C(t)
        for(int i=100; i<940; i+=100) // groups of alternatives (I .. IX)
        {
            for(int j=10; j<40; j+=10) // alternatives within a group (e.g. H01-H03 in I)
                //for(int l=1; l<4; l++) // various families of distribution of censoring time F^C(t)
                H.push_back( 1000+i+j+l );
        }
        // Sample sizes (n1 = n2)
        vector<int> sample_sizes = { 20, 30, 50, 75, 100, 150, 200, 300, 500, 1000 };
        // Simulation (getting H, simulating samples, computing statistics, saving)
        for(size_t i = 0; i<H.size(); i++)
        {
            int Hyp = H[i];
            if(rank == 0)
                printf("\tH = %d\n",Hyp);
            for(int per = 0; per<51; per+=10) // expected censoring rate, %
            {
                // ---- distributions for the competing hypotheses ----
                AlternativeHypotheses H0_1(Hyp,1,0), H0_2(Hyp,2,0);
                AlternativeHypotheses H1_1(Hyp,1,per), H1_2(Hyp,2,per);
                for(size_t jj=0; jj<sample_sizes.size(); jj++)
                {
                    int n = sample_sizes[jj];
                    // ---- samples under H0: both drawn from the same S(t) ----
                    Sample A0(*H0_1.D,n,Gw);
                    Sample B0(*H0_1.D,n,Gw);
                    if( per > 0 )
                    {
                        A0.CensoredTypeThird(*H1_1.D,Gw);
                        B0.CensoredTypeThird(*H1_1.D,Gw);
                    }
                    // ---- samples under H1: drawn from different S(t) ----
                    Sample A1(*H0_1.D,n,Gw);
                    Sample B1(*H0_2.D,n,Gw);
                    if( per > 0 )
                    {
                        A1.CensoredTypeThird(*H1_1.D,Gw);
                        B1.CensoredTypeThird(*H1_2.D,Gw);
                    }
                    // ---- computation of the test statistics & save to file ----
                    FILE *ou = fopen(file_to_save, "a");
                    if( ou == NULL )
                        continue; // ROBUSTNESS: the original dereferenced a NULL FILE* on failure
                    // row under H0
                    auto perc1 = A0.RealCensoredPercent();
                    auto perc2 = B0.RealCensoredPercent();
                    fprintf(ou,"%d;", iter);
                    fprintf(ou,"H0;");
                    fprintf(ou,"%d;", Hyp);
                    fprintf(ou,"%d;%d;", n,n);
                    fprintf(ou,"%d;%lf;%lf", per, perc1, perc2);
                    for(size_t j=0; j<D.size(); j++)
                    {
                        auto Sn_H0 = D[j]->CalculateStatistic(A0, B0);
                        // p-value column written as 0: it is computed later in the ML-framework
                        fprintf(ou, ";%lf;0", Sn_H0);
                    }
                    fprintf(ou, "\n");
                    // row under H1
                    perc1 = A1.RealCensoredPercent();
                    perc2 = B1.RealCensoredPercent();
                    fprintf(ou,"%d;", iter);
                    fprintf(ou,"H1;");
                    fprintf(ou,"%d;", Hyp);
                    fprintf(ou,"%d;%d;", n,n);
                    fprintf(ou,"%d;%lf;%lf", per, perc1, perc2);
                    for(size_t j=0; j<D.size(); j++)
                    {
                        auto Sn_H1 = D[j]->CalculateStatistic(A1, B1);
                        fprintf(ou, ";%lf;0", Sn_H1);
                    }
                    fprintf(ou, "\n");
                    fclose( ou );
                }
            }
        }
    }
public:
    // Constructor of the class: launches N replications in parallel.
    simulation_for_machine_learning(vector<HomogeneityTest*> &D)
    {
        int N = 37650; // number of the Monte-Carlo replications
        #pragma omp parallel for
        for(int k=0; k<N; k++)
        {
            int rank = omp_get_thread_num();
            auto gen = GwMT19937[rank]; // per-thread RNG
            if(rank == 0)
                printf("\r%d", k); // progress indicator (thread 0 only)
            Simulation(k, D, rank, gen);
        }
    }
};
#endif
- Downloads last month
- 35