haizad committed on
Commit ac34f15
1 Parent(s): ce5b6d5

initial commit

Files changed (3)
  1. LICENSE +21 -0
  2. app.py +47 -0
  3. requirements.txt +2 -0
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2023 Mohammad Haizad
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
app.py ADDED
@@ -0,0 +1,47 @@
+ import gradio as gr
+ import numpy as np
+ from sklearn.ensemble import AdaBoostRegressor
+ from sklearn.tree import DecisionTreeRegressor
+ import matplotlib
+ matplotlib.use("Agg")
+ import matplotlib.pyplot as plt
+ import seaborn as sns
+
+ def train_estimators(max_depth, n_estimators):
+     rng = np.random.RandomState(1)
+     X = np.linspace(0, 6, 100)[:, np.newaxis]
+     y = np.sin(X).ravel() + np.sin(6 * X).ravel() + rng.normal(0, 0.1, X.shape[0])
+
+     regr_1 = DecisionTreeRegressor(max_depth=4)
+     regr_2 = AdaBoostRegressor(
+         DecisionTreeRegressor(max_depth=max_depth), n_estimators=n_estimators, random_state=rng
+     )
+     regr_1.fit(X, y)
+     regr_2.fit(X, y)
+     y_1 = regr_1.predict(X)
+     y_2 = regr_2.predict(X)
+     colors = sns.color_palette("colorblind")
+
+     fig, ax = plt.subplots()
+     ax.scatter(X, y, color=colors[0], label="training samples")
+     ax.plot(X, y_1, color=colors[1], label="Decision tree (max_depth=4)", linewidth=2)
+     ax.plot(X, y_2, color=colors[2], label=f"AdaBoost (max_depth={max_depth}, estimators={n_estimators})", linewidth=2)
+     ax.set_xlabel("data")
+     ax.set_ylabel("target")
+     ax.legend()
+     return fig
+
+ title = "Decision Tree Regression with AdaBoost"
+ with gr.Blocks(title=title) as demo:
+     gr.Markdown(f"## {title}")
+     gr.Markdown("This app demonstrates boosting of a decision tree regressor using AdaBoost.")
+
+     max_depth = gr.Slider(minimum=1, maximum=50, step=1, label="Maximum Depth")
+     n_estimators = gr.Slider(minimum=1, maximum=300, step=1, label="Number of Estimators")
+
+     plot = gr.Plot(label=title)
+     n_estimators.change(fn=train_estimators, inputs=[max_depth, n_estimators], outputs=[plot])
+     max_depth.change(fn=train_estimators, inputs=[max_depth, n_estimators], outputs=[plot])
+
+ demo.launch()
+
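Outside the Gradio UI, the effect of the boosting step can be checked numerically. The sketch below reuses the same synthetic two-sine dataset and the same estimators as train_estimators in app.py and compares training mean squared error for a single depth-4 tree against an AdaBoost ensemble; the values max_depth=4 and n_estimators=300 are illustrative choices, not part of the commit.

# Standalone sanity check (not part of the commit): single tree vs. AdaBoost
# on the noisy two-sine dataset used in app.py.
import numpy as np
from sklearn.ensemble import AdaBoostRegressor
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import mean_squared_error

rng = np.random.RandomState(1)
X = np.linspace(0, 6, 100)[:, np.newaxis]
y = np.sin(X).ravel() + np.sin(6 * X).ravel() + rng.normal(0, 0.1, X.shape[0])

tree = DecisionTreeRegressor(max_depth=4).fit(X, y)
boosted = AdaBoostRegressor(
    DecisionTreeRegressor(max_depth=4), n_estimators=300, random_state=rng
).fit(X, y)

print("single tree MSE:", mean_squared_error(y, tree.predict(X)))
print("AdaBoost MSE:   ", mean_squared_error(y, boosted.predict(X)))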
requirements.txt ADDED
@@ -0,0 +1,2 @@
+ scikit-learn==1.2.2
+ seaborn==0.12.2
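Note that requirements.txt pins only scikit-learn and seaborn; gradio is presumably supplied by the Space's Gradio SDK, and numpy and matplotlib arrive as transitive dependencies of the pinned packages. For a local run, installing the pinned requirements plus gradio (version unspecified in this commit) and then executing python app.py should be enough to start the demo at Gradio's default local URL.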