|
""" |
|
======================================================================================= |
|
Gradio demo to plot the decision surface of decision trees trained on the iris dataset |
|
======================================================================================= |
|
|
|
Plot the decision surface of a decision tree trained on pairs |
|
of features of the iris dataset. |
|
|
|
For each pair of iris features, the decision tree learns decision |
|
boundaries made of combinations of simple thresholding rules inferred from |
|
the training samples. |
|
|
|
We also show the tree structure of a model built on all of the features. |
|
|
|
Gradio demo created by Syed Affan <saffand03@gmail.com> |
|
""" |
|
from sklearn.datasets import load_iris |
|
from sklearn.tree import plot_tree |
|
import numpy as np |
|
import matplotlib.pyplot as plt |
|
import gradio as gr |
|
from sklearn.tree import DecisionTreeClassifier |
|
from sklearn.inspection import DecisionBoundaryDisplay |
|
|
|
|
|
# Load the iris dataset once at import time; make_plot reuses it on every call.
iris = load_iris()
|
def make_plot(criterion, max_depth, ccp_alpha):
    """Train decision trees on the iris dataset and plot their decision surfaces.

    Parameters
    ----------
    criterion : str
        Split-quality measure: 'gini', 'entropy' or 'log_loss'.
    max_depth : int
        Maximum depth of each fitted tree.
    ccp_alpha : float
        Complexity parameter for minimal cost-complexity pruning.

    Returns
    -------
    tuple(matplotlib.figure.Figure, matplotlib.figure.Figure)
        First figure: 2x3 grid of decision surfaces, one per feature pair.
        Second figure: structure of a tree trained on all four features.
    """
    n_classes = 3
    plot_colors = "ryb"  # one colour letter per iris class

    fig_1 = plt.figure()

    # One subplot per unordered pair of the four iris features.
    for pairidx, pair in enumerate([[0, 1], [0, 2], [0, 3], [1, 2], [1, 3], [2, 3]]):
        X = iris.data[:, pair]
        y = iris.target

        clf = DecisionTreeClassifier(
            criterion=criterion, max_depth=max_depth, ccp_alpha=ccp_alpha
        )
        clf.fit(X, y)

        ax = plt.subplot(2, 3, pairidx + 1)
        plt.tight_layout(h_pad=0.5, w_pad=0.5, pad=2.5)
        DecisionBoundaryDisplay.from_estimator(
            clf,
            X,
            cmap=plt.cm.RdYlBu,
            response_method="predict",
            ax=ax,
            xlabel=iris.feature_names[pair[0]],
            ylabel=iris.feature_names[pair[1]],
        )

        # Overlay the training points, one colour per class.
        # FIX: the original also passed cmap=plt.cm.RdYlBu here; with a single
        # colour string in `c`, matplotlib ignores cmap and emits a warning,
        # so the argument is dropped. (Also removed the unused plot_step local.)
        for i, color in zip(range(n_classes), plot_colors):
            idx = np.where(y == i)
            plt.scatter(
                X[idx, 0],
                X[idx, 1],
                c=color,
                label=iris.target_names[i],
                edgecolor="black",
                s=15,
            )

    plt.suptitle("Decision surface of decision trees trained on pairs of features")
    plt.legend(loc="lower right", borderpad=0, handletextpad=0)
    _ = plt.axis("tight")

    # Second figure: one tree fitted on all four features, drawn as a tree diagram.
    fig_2 = plt.figure()
    clf = DecisionTreeClassifier(
        criterion=criterion, max_depth=max_depth, ccp_alpha=ccp_alpha
    ).fit(iris.data, iris.target)
    plot_tree(clf, filled=True)
    plt.title("Decision tree trained on all the iris features")
    return fig_1, fig_2
|
|
|
title = 'Plot the decision surface of decision trees trained on the iris dataset'

# FIX: this was an f-string with no placeholders; a plain string is equivalent
# and removes the accidental brace-interpolation hazard.
model_card = """
## Description:
Plot the decision surface of a decision tree trained on pairs of features of the iris dataset.
For each pair of iris features, the decision tree learns decision boundaries made of combinations of simple thresholding rules inferred from the training samples.
We also show the tree structure of a model built on all of the features.
## Dataset
Iris Dataset
"""

with gr.Blocks(title=title) as demo:
    # NOTE(review): the stray "β"/"π" below look like mis-encoded emoji from the
    # original source — kept byte-for-byte to preserve behavior; confirm intent.
    gr.Markdown('''
<div>
<h1 style='text-align: center'>β Plot the decision surface of decision trees trained on the iris dataset π </h1>
</div>
''')
    gr.Markdown(model_card)
    gr.Markdown("Author: <a href=\"https://huggingface.co/sulpha\">sulpha</a>")

    # Hyperparameter controls feeding make_plot.
    with gr.Column():
        d0 = gr.Radio(['gini', 'entropy', 'log_loss'], value='gini', label='Criterion')
        d1 = gr.Slider(1, 10, step=1, value=5, label='max_depth')
        d2 = gr.Slider(0.0, 1, step=0.001, value=0.0, label='ccp_alpha')

    btn = gr.Button(value='Submit')

    # Two output panes: decision surfaces grid and the full-feature tree diagram.
    with gr.Row():
        p_1 = gr.Plot()
        p_2 = gr.Plot()

    btn.click(make_plot, inputs=[d0, d1, d2], outputs=[p_1, p_2])

# FIX: guard the launch so importing this module (e.g. for testing) does not
# start a web server; running the file as a script behaves as before.
if __name__ == "__main__":
    demo.launch()