# -*- coding: utf-8 -*-
"""effect of eta and iterations on sgd.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1Lso8y1XapdHGJOHnY0pL4ZeSGaa-6lOz
"""

# Commented out IPython magic to ensure Python compatibility.
import numpy as np
import matplotlib.pyplot as plt
# %matplotlib inline
plt.rcParams['figure.figsize'] = (10, 5)

def generate_data():
  # Synthetic linear data: y = 4 + 3x plus unit Gaussian noise
  X = 2 * np.random.rand(100, 1)
  y = 4 + 3 * X + np.random.randn(100, 1)
  return X, y

def get_norm_eqn(X, y):
  # Closed-form least-squares fit via the normal equation
  X_b = np.c_[np.ones((len(X), 1)), X]  # add bias column
  theta_best = np.linalg.inv(X_b.T.dot(X_b)).dot(X_b.T).dot(y)
  y_norm = X_b.dot(theta_best)
  return X_b, y_norm
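
# Optional sanity check (not part of the original notebook): the normal-equation
# solution should agree with NumPy's least-squares solver on the same data.
def check_norm_eqn():
  X, y = generate_data()
  X_b, _ = get_norm_eqn(X, y)
  theta_best = np.linalg.inv(X_b.T.dot(X_b)).dot(X_b.T).dot(y)
  theta_lstsq, *_ = np.linalg.lstsq(X_b, y, rcond=None)
  assert np.allclose(theta_best, theta_lstsq)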

def generate_sgd_plot(eta, n_iterations):
  # initialize parameters
  m = 100
  theta = np.random.randn(2, 1)
  X, y = generate_data()
  X_b, y_norm = get_norm_eqn(X, y)

  # plot how the fitted line moves towards the normal-equation line
  # as the algorithm learns
  fig = plt.figure()  # fresh figure so repeated runs do not overlay each other
  plt.scatter(X, y, c='#7678ed', label="data points")
  plt.axis([0, 2.0, 0, 14])
  for iteration in range(int(n_iterations)):  # slider values may arrive as floats
    # full-batch gradient of the MSE loss over all m samples
    gradients = 2/m * X_b.T.dot(X_b.dot(theta) - y)
    theta = theta - eta * gradients
    y_new = X_b.dot(theta)
    plt.plot(X, y_new, color='#f18701', linestyle='dashed', linewidth=0.2)
  plt.plot(X, y_norm, '#3d348b', label="Normal Equation line")
  plt.xlabel('X')
  plt.ylabel('Y')
  plt.legend(loc='best')
  return fig
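
# For comparison (not part of the original notebook): the loop above takes a
# full-batch gradient step each iteration. A per-sample stochastic update,
# which is what "SGD" usually refers to, would look roughly like this:
def sgd_step(theta, X_b, y, eta):
  m = len(X_b)
  i = np.random.randint(m)   # pick one sample at random
  xi = X_b[i:i+1]            # shape (1, 2)
  yi = y[i:i+1]              # shape (1, 1)
  gradient = 2 * xi.T.dot(xi.dot(theta) - yi)
  return theta - eta * gradient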



import gradio as gr

demo = gr.Blocks()

with demo:
    gr.Markdown(
        """
    # How learning rate and number of iterations affect SGD
    Move the sliders to change the learning rate (eta) and the number of iterations, and see how they affect the convergence rate of the algorithm.
    """
    )
    inputs = [
        gr.Slider(0.02, 0.5, label="learning rate, eta"),
        gr.Slider(500, 1000, value=500, label="number of iterations"),
    ]
    output = gr.Plot()

    btn = gr.Button("Run")
    btn.click(fn=generate_sgd_plot, inputs=inputs, outputs=output)

demo.launch()
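
# Example of calling the plotting function directly, outside the Gradio UI
# (illustrative values; demo.launch() above blocks until the server stops):
#   fig = generate_sgd_plot(eta=0.1, n_iterations=500)
#   fig.savefig("sgd_convergence.png")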