Create app.py
app.py
ADDED
"""Gradio demo comparing ROC and DET curves for two classifiers on synthetic data."""
import gradio as gr
import matplotlib.pyplot as plt
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestClassifier
from sklearn.pipeline import make_pipeline
from sklearn.svm import LinearSVC
from sklearn.metrics import DetCurveDisplay, RocCurveDisplay


def generate_synthetic_data(n_samples, n_features, n_redundant, n_informative, random_state, n_clusters_per_class):
    # Build a synthetic binary classification dataset with the requested shape.
    X, y = make_classification(
        n_samples=n_samples,
        n_features=n_features,
        n_redundant=n_redundant,
        n_informative=n_informative,
        random_state=random_state,
        n_clusters_per_class=n_clusters_per_class,
    )
    return X, y


def plot_roc_det_curves(classifier_names, svm_c, rf_max_depth, rf_n_estimators, rf_max_features,
                        n_samples, n_features, n_redundant, n_informative, random_state, n_clusters_per_class):
    X, y = generate_synthetic_data(n_samples, n_features, n_redundant, n_informative, random_state, n_clusters_per_class)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=0)

    # Candidate models; only the ones selected in the UI are fitted and plotted.
    classifiers = {
        "Linear SVM": make_pipeline(StandardScaler(), LinearSVC(C=svm_c)),
        "Random Forest": RandomForestClassifier(
            max_depth=rf_max_depth, n_estimators=rf_n_estimators, max_features=rf_max_features
        ),
    }

    # One shared figure: ROC curves on the left, DET curves on the right.
    fig, [ax_roc, ax_det] = plt.subplots(1, 2, figsize=(11, 5))

    for classifier_name in classifier_names:
        clf = classifiers[classifier_name]
        clf.fit(X_train, y_train)
        RocCurveDisplay.from_estimator(clf, X_test, y_test, ax=ax_roc, name=classifier_name)
        DetCurveDisplay.from_estimator(clf, X_test, y_test, ax=ax_det, name=classifier_name)

    ax_roc.set_title("Receiver Operating Characteristic (ROC) curves")
    ax_det.set_title("Detection Error Tradeoff (DET) curves")

    ax_roc.grid(linestyle="--")
    ax_det.grid(linestyle="--")

    plt.legend()
    plt.tight_layout()

    # Gradio's "plot" output renders the returned Matplotlib figure.
    return fig


# Input widgets (legacy gr.inputs API from pre-3.0 Gradio releases).
parameters = [
    gr.inputs.CheckboxGroup(["Linear SVM", "Random Forest"], label="Classifiers"),
    gr.inputs.Slider(0.001, 0.1, step=0.001, default=0.025, label="Linear SVM C"),
    gr.inputs.Slider(1, 10, step=1, default=5, label="Random Forest Max Depth"),
    gr.inputs.Slider(1, 20, step=1, default=10, label="Random Forest n_estimators"),
    gr.inputs.Slider(1, 10, step=1, default=1, label="Random Forest max_features"),
    gr.inputs.Slider(100, 2000, step=100, default=1000, label="Number of Samples"),
    gr.inputs.Slider(1, 10, step=1, default=2, label="Number of Features"),
    gr.inputs.Slider(0, 10, step=1, default=0, label="Number of Redundant Features"),
    gr.inputs.Slider(1, 10, step=1, default=2, label="Number of Informative Features"),
    gr.inputs.Slider(0, 100, step=1, default=1, label="Random State"),
    gr.inputs.Slider(1, 10, step=1, default=1, label="Number of Clusters per Class"),
]

# Preset input combinations shown under the interface.
examples = [
    [["Linear SVM"], 0.025, 5, 10, 1, 1000, 2, 0, 2, 1, 1],
    [["Random Forest"], 0.025, 5, 10, 1, 1000, 2, 0, 2, 1, 1],
    [["Linear SVM", "Random Forest"], 0.025, 5, 10, 1, 1000, 2, 0, 2, 1, 1],
]

iface = gr.Interface(
    fn=plot_roc_det_curves,
    inputs=parameters,
    outputs="plot",
    examples=examples,
    title="Detection error tradeoff (DET) curve",
    description=(
        "In this example, we compare two binary classification multi-threshold metrics: "
        "the Receiver Operating Characteristic (ROC) and the Detection Error Tradeoff (DET). "
        "For that purpose, we evaluate two different classifiers for the same classification task. "
        "See the original scikit-learn example here: "
        "https://scikit-learn.org/stable/auto_examples/model_selection/plot_det.html"
    ),
)
iface.launch()
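
For reference, the same two-panel ROC/DET comparison can be reproduced without the Gradio wrapper, which is handy for checking the plotting logic in a plain script or notebook. A minimal sketch, reusing only the scikit-learn calls already used in app.py together with the file's default slider values (nothing else is assumed):

    import matplotlib.pyplot as plt
    from sklearn.datasets import make_classification
    from sklearn.ensemble import RandomForestClassifier
    from sklearn.metrics import DetCurveDisplay, RocCurveDisplay
    from sklearn.model_selection import train_test_split
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import StandardScaler
    from sklearn.svm import LinearSVC

    # Same data and models as the app's default inputs, plotted without the web UI.
    X, y = make_classification(n_samples=1000, n_features=2, n_redundant=0,
                               n_informative=2, random_state=1, n_clusters_per_class=1)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=0)

    classifiers = {
        "Linear SVM": make_pipeline(StandardScaler(), LinearSVC(C=0.025)),
        "Random Forest": RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1),
    }

    fig, (ax_roc, ax_det) = plt.subplots(1, 2, figsize=(11, 5))
    for name, clf in classifiers.items():
        clf.fit(X_train, y_train)
        RocCurveDisplay.from_estimator(clf, X_test, y_test, ax=ax_roc, name=name)
        DetCurveDisplay.from_estimator(clf, X_test, y_test, ax=ax_det, name=name)
    ax_roc.set_title("Receiver Operating Characteristic (ROC) curves")
    ax_det.set_title("Detection Error Tradeoff (DET) curves")
    plt.show()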