vumichien committed on
Commit
cd417e9
1 Parent(s): deb8829

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +104 -0
app.py ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import numpy as np
3
+ from matplotlib import pyplot as plt
4
+
5
+ from sklearn import linear_model, datasets
6
+
7
+
8
+ theme = gr.themes.Monochrome(
9
+ primary_hue="indigo",
10
+ secondary_hue="blue",
11
+ neutral_hue="slate",
12
+ )
13
+ model_card = f"""
14
+ ## Description
15
+
16
+ **Random sample consensus (RANSAC)** is a method to estimate a mathematical model from a set of observed data that may have some wrong information.
17
+ The number of times it tries affects how likely it is to get a good answer. **RANSAC** is commonly used in photogrammetry to solve problems with linear or non-linear regression.
18
+ It works by separating the input data into two groups: inliers (which may have some noise) and outliers (which are wrong data). It estimates the model only using the inliers.
19
+ In this demo, a simulation regression dataset with noise is created, and then compare the results of fitting data in **Linear model** and **RANSAC**.
20
+ You can play around with different ``number of samples`` and ``number of outliers`` to see the effect
21
+
22
+ ## Dataset
23
+
24
+ Simulation dataset
25
+ """
26
+
27
+
28
+ def do_train(n_samples, n_outliers):
29
+
30
+ X, y, coef = datasets.make_regression(
31
+ n_samples=n_samples,
32
+ n_features=1,
33
+ n_informative=1,
34
+ noise=10,
35
+ coef=True,
36
+ random_state=0,
37
+ )
38
+
39
+ # Add outlier data
40
+ np.random.seed(0)
41
+ X[:n_outliers] = 3 + 0.5 * np.random.normal(size=(n_outliers, 1))
42
+ y[:n_outliers] = -3 + 10 * np.random.normal(size=n_outliers)
43
+
44
+ # Fit line using all data
45
+ lr = linear_model.LinearRegression()
46
+ lr.fit(X, y)
47
+
48
+ # Robustly fit linear model with RANSAC algorithm
49
+ ransac = linear_model.RANSACRegressor()
50
+ ransac.fit(X, y)
51
+ inlier_mask = ransac.inlier_mask_
52
+ outlier_mask = np.logical_not(inlier_mask)
53
+
54
+ # Predict data of estimated models
55
+ line_X = np.arange(X.min(), X.max())[:, np.newaxis]
56
+ line_y = lr.predict(line_X)
57
+ line_y_ransac = ransac.predict(line_X)
58
+
59
+ text = f"True coefficients: {coef:.4f}. Linear regression coefficients: {lr.coef_[0]:.4f}. RANSAC coefficients: {ransac.estimator_.coef_[0]:.4f}"
60
+
61
+ fig, axes = plt.subplots()
62
+
63
+ axes.scatter(
64
+ X[inlier_mask], y[inlier_mask], color="yellowgreen", marker=".", label="Inliers"
65
+ )
66
+ axes.scatter(
67
+ X[outlier_mask], y[outlier_mask], color="gold", marker=".", label="Outliers"
68
+ )
69
+ axes.plot(line_X, line_y, color="navy", linewidth=2, label="Linear regressor")
70
+ axes.plot(
71
+ line_X,
72
+ line_y_ransac,
73
+ color="cornflowerblue",
74
+ linewidth=2,
75
+ label="RANSAC regressor",
76
+ )
77
+ axes.legend(loc="lower right")
78
+ axes.set_xlabel("Input")
79
+ axes.set_ylabel("Response")
80
+
81
+ return fig, text
82
+
83
+
84
+
85
+ with gr.Blocks(theme=theme) as demo:
86
+ gr.Markdown('''
87
+ <div>
88
+ <h1 style='text-align: center'>Robust linear model estimation using RANSAC</h1>
89
+ </div>
90
+ ''')
91
+ gr.Markdown(model_card)
92
+ gr.Markdown("Author: <a href=\"https://huggingface.co/vumichien\">Vu Minh Chien</a>. Based on the example from <a href=\"https://scikit-learn.org/stable/auto_examples/linear_model/plot_ransac.html#sphx-glr-auto-examples-linear-model-plot-ransac-py\">scikit-learn</a>")
93
+ n_samples = gr.Slider(minimum=500, maximum=5000, step=500, value=500, label="Number of samples")
94
+ n_outliers = gr.Slider(minimum=25, maximum=250, step=25, value=25, label="Number of outliers")
95
+ with gr.Row():
96
+ with gr.Column():
97
+ plot = gr.Plot(label="Compare Linear regressor and RANSAC")
98
+ with gr.Column():
99
+ results = gr.Textbox(label="Results")
100
+
101
+ n_samples.change(fn=do_train, inputs=[n_samples, n_outliers], outputs=[plot, results])
102
+ n_outliers.change(fn=do_train, inputs=[n_samples, n_outliers], outputs=[plot, results])
103
+
104
+ demo.launch()