zoya23 committed
Commit 26f0716 · verified · 1 Parent(s): 0262264

Update app.py

Files changed (1)
  1. app.py +14 -16
app.py CHANGED
@@ -3,23 +3,22 @@ import numpy as np
 import plotly.graph_objs as go
 import sympy as sp
 
-# Streamlit Page Configuration
 st.set_page_config(page_title="Gradient Descent Visualizer", layout="wide")
 
-# Sidebar Inputs
+# inputs
 st.sidebar.header("Gradient Descent Settings")
 func_input = st.sidebar.text_input("Enter a function (use 'x'):", "x**2")
 learning_rate = st.sidebar.number_input("Learning Rate", min_value=0.001, max_value=1.0, value=0.1, step=0.01)
 initial_x = st.sidebar.number_input("Initial X", min_value=-10.0, max_value=10.0, value=5.0, step=0.1)
 
-# Reset Session State When Function Changes
+# Reset when function changes
 if "previous_func" not in st.session_state or st.session_state.previous_func != func_input:
     st.session_state.current_x = initial_x
     st.session_state.iteration = 0
     st.session_state.path = [(initial_x, 0)]
     st.session_state.previous_func = func_input
 
-# Symbolic Computation
+# Symbolic computation
 x = sp.symbols('x')
 try:
     func = sp.sympify(func_input)
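
Note: this hunk cuts off inside the try block, but later lines use derivative, func_np, and derivative_np, so the unchanged lines that follow presumably differentiate the expression and lambdify it for NumPy evaluation. A minimal sketch of that step under that assumption (the expression "x**2" stands in for func_input):

import numpy as np
import sympy as sp

x = sp.symbols('x')
func = sp.sympify("x**2")                            # stands in for the user input
derivative = sp.diff(func, x)                        # symbolic f'(x)
func_np = sp.lambdify(x, func, "numpy")              # vectorised f(x)
derivative_np = sp.lambdify(x, derivative, "numpy")  # vectorised f'(x)

print(func_np(np.linspace(-1, 1, 3)))   # [1. 0. 1.]
print(derivative_np(5.0))               # 10.0
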
@@ -30,28 +29,28 @@ except Exception as e:
     st.error(f"Invalid function: {e}")
     st.stop()
 
-# Gradient Descent Step
+# Gradient Descent Function
 def step_gradient_descent(current_x, lr):
     grad = derivative_np(current_x)
     next_x = current_x - lr * grad
     return next_x, grad
 
-# Perform Next Iteration
+# for next iterations
 if st.sidebar.button("Next Iteration"):
     next_x, _ = step_gradient_descent(st.session_state.current_x, learning_rate)
     st.session_state.path.append((st.session_state.current_x, func_np(st.session_state.current_x)))
     st.session_state.current_x = next_x
     st.session_state.iteration += 1
 
-# Calculate Actual Minima
+# Calculate actual minima
 critical_points = sp.solve(derivative, x)
 actual_minima = [p.evalf() for p in critical_points if derivative_np(p) == 0 and sp.diff(derivative, x).evalf(subs={x: p}) > 0]
 
-# Generate Graph Data
+# Generate graph
 x_vals = np.linspace(-15, 15, 1000)
 y_vals = func_np(x_vals)
 
-# Plotly Visualization
+
 fig = go.Figure()
 
 # Function Plot
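
For context, step_gradient_descent applies the standard update x_next = x - lr * f'(x), and the actual_minima comprehension keeps only critical points where the second derivative is positive. A standalone sketch of that second-derivative test (the expression x**2 - 4*x is an illustrative stand-in, not from the commit):

import sympy as sp

x = sp.symbols('x')
func = sp.sympify("x**2 - 4*x")
derivative = sp.diff(func, x)                # 2*x - 4
second_derivative = sp.diff(derivative, x)   # 2

critical_points = sp.solve(derivative, x)    # [2]
minima = [p.evalf() for p in critical_points
          if second_derivative.evalf(subs={x: p}) > 0]
print(minima)                                # [2.00000000000000]

(The derivative_np(p) == 0 guard in the committed line is largely redundant, since sp.solve already returns roots of the derivative.)
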
@@ -71,12 +70,11 @@ fig.add_trace(go.Scatter(
     hoverinfo='none'
 ))
 
-# Highlight Current Point
+# Highlight the current point on graph
 fig.add_trace(go.Scatter(
     x=[st.session_state.current_x], y=[func_np(st.session_state.current_x)],
     mode='markers', marker=dict(color='orange', size=12),
-    name="Current Point", hoverinfo='none'
-))
+    name="Current Point", hoverinfo='none'))
 
 # Highlight Actual Minima
 if actual_minima:
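
The marker trace above follows the usual Plotly layering pattern: a lines trace for the curve plus a markers trace for the point being tracked. A self-contained version for reference (the values and trace names here are illustrative only):

import numpy as np
import plotly.graph_objs as go

x_vals = np.linspace(-15, 15, 1000)
fig = go.Figure()
fig.add_trace(go.Scatter(x=x_vals, y=x_vals**2, mode='lines', name="f(x)"))
fig.add_trace(go.Scatter(x=[5.0], y=[25.0], mode='markers',
                         marker=dict(color='orange', size=12),
                         name="Current Point", hoverinfo='none'))
fig.show()
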
@@ -118,18 +116,18 @@ fig.update_layout(
     ),
     showlegend=False,
     hovermode="closest",
-    dragmode="pan", # Corrected line: removed extra space
+    dragmode="pan",  # removed extra space
     autosize=True,
 )
 
-# Fullscreen and Export Options
+
 st.markdown("### Gradient Descent Visualization")
 st.plotly_chart(fig, use_container_width=True)
 
-# Display Current Point
+# current point
 st.write(f"**Current Point (x):** {st.session_state.current_x:.4f}")
 
-# Display Iteration History below the graph
+# iteration history
 st.write("### Iteration History:")
 for i, (x_val, _) in enumerate(st.session_state.path):
     st.write(f"Iteration {i+1}: x = {x_val:.4f}")
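
One more note on the overall pattern: Streamlit reruns the entire script on every interaction, so the one-step-per-click behaviour only works because st.session_state persists between reruns. A stripped-down sketch of that mechanism, hard-coding f(x) = x**2 with learning rate 0.1 purely for illustration:

import streamlit as st

# Initialise persistent state on the first run only
if "current_x" not in st.session_state:
    st.session_state.current_x = 5.0
    st.session_state.iteration = 0

# Each button click triggers a rerun; session state carries the previous value forward
if st.sidebar.button("Next Iteration"):
    st.session_state.current_x -= 0.1 * 2 * st.session_state.current_x  # x -= lr * f'(x)
    st.session_state.iteration += 1

st.write(f"Iteration {st.session_state.iteration}: x = {st.session_state.current_x:.4f}")
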
 