Update app.py
app.py CHANGED
@@ -70,7 +70,7 @@ def GRUModel(trainX, trainy, testX, testy, epochs=1000, batch_size=64, learning_
     # Apply feature selection using Random Forest Regressor
     if feature_selection:
         # Use RandomForestRegressor to rank features by importance
-        rf = RandomForestRegressor(n_estimators=100, random_state=
+        rf = RandomForestRegressor(n_estimators=100, random_state=60)
         rf.fit(trainX, trainy)

         # Select features with importance greater than a threshold (e.g., mean importance)
@@ -159,7 +159,7 @@ def GRUModel(trainX, trainy, testX, testy, epochs=1000, batch_size=64, learning_
 #-----------------------------------------------------------DeepMap-------------------------------------------------------------------------------
 def CNNModel(trainX, trainy, testX, testy, epochs=1000, batch_size=64, learning_rate=0.0001, l1_reg=0.0001, l2_reg=0.0001, dropout_rate=0.3,feature_selection=True):
     if feature_selection:
-        rf=RandomForestRegressor(n_estimators=100,random_state=
+        rf=RandomForestRegressor(n_estimators=100,random_state=60)
         rf.fit(trainX,trainy)

         selector=SelectFromModel(rf, threshold="mean",prefit=True)
@@ -227,7 +227,7 @@ def CNNModel(trainX, trainy, testX, testy, epochs=1000, batch_size=64, learning_
 #-------------------------------------------------------------------------Random Forest----------------------------------------------------
 def RFModel(trainX, trainy, testX, testy, n_estimators=100, max_depth=None,feature_selection=True):
     if feature_selection:
-        rf=RandomForestRegressor(n_estimators=100, random_state=
+        rf=RandomForestRegressor(n_estimators=100, random_state=60)
         rf.fit(trainX, trainy)
         selector=SelectFromModel(rf, threshold="mean", prefit=True)
         trainX=selector.transform(trainX)
@@ -257,7 +257,7 @@ def RFModel(trainX, trainy, testX, testy, n_estimators=100, max_depth=None,featu
 #------------------------------------------------------------------------------XGboost---------------------------------------------------------------
 def XGBoostModel(trainX, trainy, testX, testy,learning_rate,min_child_weight,feature_selection=True, n_estimators=100, max_depth=None):
     if feature_selection:
-        rf=RandomForestRegressor(n_estimators=100,random_state=
+        rf=RandomForestRegressor(n_estimators=100,random_state=60)
         rf.fit(trainX,trainy)
         selector=SelectFromModel(rf,threshold="mean",prefit=True)
         trainX=selector.transform(trainX)
@@ -381,7 +381,7 @@ def NestedKFoldCrossValidation(

     # Feature selection
     if feature_selection:
-        rf = RandomForestRegressor(n_estimators=
+        rf = RandomForestRegressor(n_estimators=1000, random_state=60)
         rf.fit(training_genotypic_data_merged, phenotypic_info)
         selector = SelectFromModel(rf, threshold="mean", prefit=True)
         training_genotypic_data_merged = selector.transform(training_genotypic_data_merged)
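
Every hunk in this commit completes the same feature-selection step: a RandomForestRegressor is fitted on the training data and SelectFromModel keeps only the features whose importance exceeds the mean importance. The snippet below is a minimal, self-contained sketch of that pattern, not a copy of app.py: the synthetic arrays and their shapes are placeholders, while n_estimators=100, random_state=60, and threshold="mean" are taken from the added lines of the diff.

# Minimal sketch of the feature-selection step shared by the models above.
# The data here is synthetic; in app.py the inputs are the genotypic matrices
# (trainX/testX) and the phenotypic target (trainy).
import numpy as np
from sklearn.ensemble import RandomForestRegressor
from sklearn.feature_selection import SelectFromModel

rng = np.random.default_rng(0)
trainX = rng.random((120, 50))   # placeholder feature matrix
trainy = rng.random(120)         # placeholder target values
testX = rng.random((30, 50))

# Rank features by importance with a Random Forest regressor...
rf = RandomForestRegressor(n_estimators=100, random_state=60)
rf.fit(trainX, trainy)

# ...then keep only the features whose importance is above the mean importance.
selector = SelectFromModel(rf, threshold="mean", prefit=True)
trainX = selector.transform(trainX)
testX = selector.transform(testX)

print(trainX.shape, testX.shape)  # both matrices keep the same reduced column set

Applying the same fitted selector to both the training and test matrices keeps their columns aligned; the diff only shows the trainX transform in some hunks, so the testX line here is illustrative of that intent rather than quoted from app.py.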