jucamohedano committed on
Commit
0d44b47
1 Parent(s): 373c6ce

Add application and requirements.txt

Browse files
Files changed (2) hide show
  1. app.py +92 -0
  2. requirements.txt +2 -0
app.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import numpy as np
3
+ import matplotlib.pyplot as plt
4
+ import warnings
5
+
6
+ from functools import partial
7
+ from sklearn.datasets import make_blobs
8
+ from sklearn.svm import LinearSVC
9
+ from sklearn.inspection import DecisionBoundaryDisplay
10
+ from sklearn.exceptions import ConvergenceWarning
11
+
12
def train_model(C, n_samples):
    """Fit a LinearSVC on a synthetic two-class blob dataset and plot it.

    Trains ``LinearSVC(C=C, loss="hinge")`` on ``n_samples`` points drawn
    from two Gaussian blobs, then draws the decision boundary, the two
    margin lines, and circles the support vectors.

    Parameters
    ----------
    C : float
        Regularization strength of the SVM (smaller C = stronger
        regularization = wider margin).
    n_samples : int
        Number of points to generate with ``make_blobs``.

    Returns
    -------
    matplotlib.figure.Figure
        The figure containing the scatter plot and decision boundary.
    """
    # NOTE(review): the original built a one-key ``default_base`` dict and
    # immediately overwrote its only entry with ``n_samples`` — dead code,
    # removed in favor of using the argument directly.
    X, y = make_blobs(n_samples=n_samples, centers=2, random_state=0)

    fig, ax = plt.subplots()

    # Suppress convergence warnings: with small C / few samples the hinge-loss
    # solver may hit its iteration limit, which is acceptable for this demo.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=ConvergenceWarning)
        clf = LinearSVC(C=C, loss="hinge", random_state=42).fit(X, y)

    # Obtain the support vectors through the decision function: unlike SVC,
    # LinearSVC does not expose them, but the support vectors are exactly the
    # samples lying within the margin boundaries (|decision| <= 1, with a
    # small tolerance for floating-point error).
    decision_function = clf.decision_function(X)
    support_vector_indices = np.where(np.abs(decision_function) <= 1 + 1e-15)[0]
    support_vectors = X[support_vector_indices]

    ax.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=plt.cm.Paired)
    # Draw decision boundary (level 0) and the two margins (levels -1, +1).
    DecisionBoundaryDisplay.from_estimator(
        clf,
        X,
        ax=ax,
        grid_resolution=50,
        plot_method="contour",
        colors="k",
        levels=[-1, 0, 1],
        alpha=0.5,
        linestyles=["--", "-", "--"],
    )
    # Circle the support vectors with hollow markers.
    ax.scatter(
        support_vectors[:, 0],
        support_vectors[:, 1],
        s=100,
        linewidth=1,
        facecolors="none",
        edgecolors="k",
    )
    ax.set_title("C=" + str(C))

    return fig
60
+
61
def iter_grid(n_rows, n_cols):
    """Generator that lays out an ``n_rows`` x ``n_cols`` grid of gradio
    containers, yielding once inside each cell.

    Each ``next()`` call leaves the caller inside a fresh ``gr.Column``
    nested in the current ``gr.Row``, so components created between
    iterations land in successive grid cells.
    """
    for _row_idx in range(n_rows):
        with gr.Row():
            for _col_idx in range(n_cols):
                with gr.Column():
                    yield
68
+
69
title = "📈 Linear Support Vector Classification"
with gr.Blocks(title=title) as demo:
    gr.Markdown(f"## {title}")
    # FIX: the original concatenated these fragments without separating
    # spaces, rendering "...provide thesupport vectors ... demonstrateshow...".
    gr.Markdown(
        "Unlike SVC (based on LIBSVM), LinearSVC "
        "(based on LIBLINEAR) does not provide the "
        "support vectors. This example demonstrates "
        "how to obtain the support vectors in LinearSVC."
    )

    # FIX: removed the unused ``input_models`` list ("Bisecting K-Means",
    # "K-Means") — a leftover from a clustering demo, never referenced.

    n_samples = gr.Slider(minimum=20, maximum=100, step=5,
                          label="Number of Samples")

    input_model = "LinearSVC"
    # One plot per regularization strength C, laid out on a 1x2 grid;
    # ``partial`` bakes C in so the slider callback only passes n_samples.
    for _, C in zip(iter_grid(1, 2), [1, 100]):
        plot = gr.Plot(label=input_model)

        fn = partial(train_model, C)
        n_samples.change(fn=fn, inputs=[n_samples], outputs=plot)


demo.launch()
requirements.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ scikit-learn
2
+ matplotlib