"""
=======================================================================================
Gradio demo to plot the decision surface of decision trees trained on the iris dataset
=======================================================================================
Plot the decision surface of a decision tree trained on pairs
of features of the iris dataset.
For each pair of iris features, the decision tree learns decision
boundaries made of combinations of simple thresholding rules inferred from
the training samples.
We also show the tree structure of a model built on all of the features.
Gradio demo created by Syed Affan <saffand03@gmail.com>
"""
from sklearn.datasets import load_iris
from sklearn.tree import plot_tree
import numpy as np
import matplotlib.pyplot as plt
import gradio as gr
from sklearn.tree import DecisionTreeClassifier
from sklearn.inspection import DecisionBoundaryDisplay

# Load the iris dataset once at import time; make_plot reuses it on every call.
iris = load_iris()
def make_plot(criterion, max_depth, ccp_alpha):
    """Train decision trees on the iris data and return two figures.

    Parameters
    ----------
    criterion : str
        Split-quality measure ('gini', 'entropy' or 'log_loss').
    max_depth : int
        Maximum depth of each tree.
    ccp_alpha : float
        Complexity parameter for minimal cost-complexity pruning.

    Returns
    -------
    tuple(matplotlib.figure.Figure, matplotlib.figure.Figure)
        fig_1: 2x3 grid of decision surfaces, one per pair of features.
        fig_2: structure of a single tree fit on all four features.
    """
    # Parameters
    n_classes = 3
    plot_colors = "ryb"  # one matplotlib color code per iris class

    fig_1 = plt.figure()
    for pairidx, pair in enumerate([[0, 1], [0, 2], [0, 3], [1, 2], [1, 3], [2, 3]]):
        # We only take the two corresponding features
        X = iris.data[:, pair]
        y = iris.target

        # Train
        clf = DecisionTreeClassifier(
            criterion=criterion, max_depth=max_depth, ccp_alpha=ccp_alpha
        )
        clf.fit(X, y)

        # Plot the decision boundary (DecisionBoundaryDisplay builds the
        # evaluation grid itself, so no manual mesh/plot_step is needed).
        ax = plt.subplot(2, 3, pairidx + 1)
        plt.tight_layout(h_pad=0.5, w_pad=0.5, pad=2.5)
        DecisionBoundaryDisplay.from_estimator(
            clf,
            X,
            cmap=plt.cm.RdYlBu,
            response_method="predict",
            ax=ax,
            xlabel=iris.feature_names[pair[0]],
            ylabel=iris.feature_names[pair[1]],
        )

        # Plot the training points. NOTE: cmap= is intentionally NOT passed
        # here — with a fixed color string in c= the colormap is ignored and
        # recent matplotlib versions emit a warning about the combination.
        for i, color in zip(range(n_classes), plot_colors):
            idx = np.where(y == i)
            plt.scatter(
                X[idx, 0],
                X[idx, 1],
                c=color,
                label=iris.target_names[i],
                edgecolor="black",
                s=15,
            )

    plt.suptitle("Decision surface of decision trees trained on pairs of features")
    plt.legend(loc="lower right", borderpad=0, handletextpad=0)
    _ = plt.axis("tight")

    # Display the structure of a single decision tree trained on all the
    # features together.
    fig_2 = plt.figure()
    clf = DecisionTreeClassifier(
        criterion=criterion, max_depth=max_depth, ccp_alpha=ccp_alpha
    ).fit(iris.data, iris.target)
    plot_tree(clf, filled=True)
    plt.title("Decision tree trained on all the iris features")
    return fig_1, fig_2
# ---- Gradio user interface -------------------------------------------------
title = 'Plot the decision surface of decision trees trained on the iris dataset'

model_card = f"""
## Description:
Plot the decision surface of a decision tree trained on pairs of features of the iris dataset.
For each pair of iris features, the decision tree learns decision boundaries made of combinations of simple thresholding rules inferred from the training samples.
We also show the tree structure of a model built on all of the features.
## Dataset
Iris Dataset
"""

with gr.Blocks(title=title) as demo:
    # Page header, description and attribution.
    gr.Markdown('''
<div>
<h1 style='text-align: center'>⚒ Plot the decision surface of decision trees trained on the iris dataset 🛠</h1>
</div>
''')
    gr.Markdown(model_card)
    gr.Markdown("Author: <a href=\"https://huggingface.co/sulpha\">sulpha</a>")

    # Hyper-parameter controls, mapped 1:1 onto make_plot's arguments.
    with gr.Column():
        criterion_radio = gr.Radio(
            ['gini', 'entropy', 'log_loss'], value='gini', label='Criterion'
        )
        depth_slider = gr.Slider(1, 10, step=1, value=5, label='max_depth')
        alpha_slider = gr.Slider(0.0, 1, step=0.001, value=0.0, label='ccp_alpha')
        submit_btn = gr.Button(value='Submit')

    # Output area: decision surfaces on the left, tree structure on the right.
    with gr.Row():
        surface_plot = gr.Plot()
        tree_plot = gr.Plot()

    submit_btn.click(
        make_plot,
        inputs=[criterion_radio, depth_slider, alpha_slider],
        outputs=[surface_plot, tree_plot],
    )

demo.launch()