# EdTechFinalPrototype / final_graph.py
# Author: rgaddamanugu3 — "Update final_graph.py" (commit 579fb08, verified)
import numpy as np
import matplotlib.pyplot as plt
import gradio as gr
def plot_secant(h):
    """Plot f(x) = x^2 with the secant through (0, 0) and (h, h^2).

    Left panel: the parabola and the secant segment between the two points.
    Right panel: the line y = m*x with the secant slope m.

    The difference quotient (f(0+h) - f(0)) / h simplifies algebraically to h,
    so the slope is computed without a division; this also makes h == 0 valid
    (yielding the tangent-limit slope 0) where the original (h**2)/h raised
    ZeroDivisionError.

    Args:
        h: horizontal offset of the second secant point.

    Returns:
        The matplotlib figure containing both panels.
    """
    x = np.linspace(-1, 2, 400)
    y = x**2
    m = h  # (h**2 - 0) / (h - 0) == h; well-defined even at h == 0
    fig, axs = plt.subplots(1, 2, figsize=(8, 4))
    for ax in axs:
        ax.set_xlim(-1, 2)
        ax.set_ylim(-1, 4)
        ax.set_xticks(np.arange(-1, 3, 1))
        ax.set_yticks(np.arange(-1, 5, 1))
        ax.grid(True, linestyle='--', linewidth=0.5, color='lightgray')
        ax.spines['top'].set_visible(False)
        ax.spines['right'].set_visible(False)
    # Left: parabola plus the secant segment and its endpoints.
    axs[0].plot(x, y, color='black')
    axs[0].plot([0, h], [0, h**2], color='red', linewidth=2)
    axs[0].scatter([0, h], [0, h**2], color='red', zorder=5)
    # Right: the secant slope shown as a line through the origin.
    axs[1].plot(x, m * x, color='red', linewidth=2)
    plt.tight_layout()
    return fig
def plot_tangent(x0):
    """Plot f(x) = x^2 with its tangent at x0 (left) and f'(x) = 2x with the
    current slope point highlighted (right).

    Args:
        x0: point of tangency on the parabola.

    Returns:
        The matplotlib figure containing both panels.
    """
    xs = np.linspace(-1, 2, 400)
    slope = 2 * x0      # derivative of x^2 evaluated at x0
    y0 = x0 ** 2

    fig, (ax_curve, ax_deriv) = plt.subplots(1, 2, figsize=(8, 4))
    for axis in (ax_curve, ax_deriv):
        axis.set_xlim(-1, 2)
        axis.set_ylim(-1, 4)
        axis.set_xticks(np.arange(-1, 3, 1))
        axis.set_yticks(np.arange(-1, 5, 1))
        axis.grid(True, linestyle='--', linewidth=0.5, color='lightgray')
        axis.spines['top'].set_visible(False)
        axis.spines['right'].set_visible(False)

    # Left: parabola plus the tangent line through (x0, y0).
    ax_curve.plot(xs, xs ** 2, color='black')
    ax_curve.plot(xs, slope * (xs - x0) + y0, color='red', linewidth=2)
    ax_curve.scatter([x0], [y0], color='red', zorder=5)

    # Right: the derivative line 2x with the current slope marked.
    ax_deriv.plot(xs, 2 * xs, color='black')
    ax_deriv.scatter([x0], [slope], color='red', zorder=5)

    plt.tight_layout()
    return fig
def plot_gradient_descent(lr, init_x, steps):
    """Visualize gradient descent on f(x) = x^2.

    Left panel: the descent path overlaid on the parabola, with step arrows.
    Right panel: the iterate x_k plotted against the iteration index k.

    Args:
        lr: learning rate used in the update x <- x - lr * f'(x).
        init_x: starting point x_0.
        steps: number of iterations (coerced to int).

    Returns:
        The matplotlib figure containing both panels.
    """
    n_iters = int(steps)
    # Iterate x <- x - lr * 2x (f'(x) = 2x for f(x) = x^2).
    trail = [init_x]
    for _ in range(n_iters):
        trail.append(trail[-1] - lr * 2 * trail[-1])
    pts = np.array(trail)

    fig, (ax_path, ax_iter) = plt.subplots(1, 2, figsize=(8, 4))

    # Left: parabola with the descent path and one arrow per step.
    grid_x = np.linspace(-2, 2, 400)
    ax_path.plot(grid_x, grid_x ** 2, color='black')
    ax_path.plot(pts, pts ** 2, marker='o', color='red', linewidth=2)
    for a, b in zip(pts[:-1], pts[1:]):
        ax_path.annotate('', xy=(b, b ** 2), xytext=(a, a ** 2),
                         arrowprops=dict(arrowstyle='->', color='red'))
    ax_path.set_xlim(-2, 2)
    ax_path.set_ylim(-0.5, 5)
    ax_path.set_title('Gradient Descent Path')
    ax_path.grid(True, linestyle='--', linewidth=0.5, color='lightgray')

    # Right: iterate value per iteration with matching arrows.
    ax_iter.plot(range(n_iters + 1), pts, marker='o', color='red', linewidth=2)
    for k in range(n_iters):
        ax_iter.annotate('', xy=(k + 1, pts[k + 1]), xytext=(k, pts[k]),
                         arrowprops=dict(arrowstyle='->', color='red'))
    ax_iter.set_xlim(0, n_iters)
    ax_iter.set_ylim(pts.min() - 0.5, pts.max() + 0.5)
    ax_iter.set_xticks(range(0, n_iters + 1, max(1, n_iters // 5)))
    ax_iter.set_xlabel('Iteration')
    ax_iter.set_title('x over Iterations')
    ax_iter.grid(True, linestyle='--', linewidth=0.5, color='lightgray')

    plt.tight_layout()
    return fig
def plot_chain_network(x):
    """Draw the computation graph x -> y=2x -> z=3y -> L=4z annotated with the
    local partial derivatives and the chain-rule product dL/dx = 4*3*2 = 24.

    Args:
        x: input value propagated through the chain.

    Returns:
        The matplotlib figure with the annotated graph.
    """
    y = 2 * x
    z = 3 * y
    L = 4 * z

    fig, ax = plt.subplots(figsize=(6, 2))
    ax.axis('off')
    xpos = {'x': 0.1, 'y': 0.3, 'z': 0.5, 'L': 0.7}

    # Nodes: open circles with the variable name centered inside.
    for node, cx in xpos.items():
        ax.add_patch(plt.Circle((cx, 0.5), 0.05, fill=False))
        ax.text(cx, 0.5, node, ha='center', va='center')

    # Edges: forward arrows labeled with the local partial derivative.
    edges = [
        ('x', 'y', r'$\partial y/\partial x=2$'),
        ('y', 'z', r'$\partial z/\partial y=3$'),
        ('z', 'L', r'$\partial L/\partial z=4$'),
    ]
    for src, dst, label in edges:
        x_src, x_dst = xpos[src], xpos[dst]
        ax.annotate('', xy=(x_dst, 0.5), xytext=(x_src, 0.5),
                    arrowprops=dict(arrowstyle='->'))
        ax.text((x_src + x_dst) / 2, 0.6, label, ha='center', va='center')

    # Chain-rule formula and its numeric value, anchored in axes coordinates.
    ax.text(0.02, 0.15,
            r'$\frac{\partial L}{\partial x}=\frac{\partial L}{\partial z}\cdot\frac{\partial z}{\partial y}\cdot\frac{\partial y}{\partial x}$',
            transform=ax.transAxes, ha='left')
    ax.text(0.02, 0.02, r'$=4\times3\times2=24$', transform=ax.transAxes, ha='left')

    # Current numeric value of each node, just below its circle.
    for node, val in (('x', x), ('y', y), ('z', z), ('L', L)):
        ax.text(xpos[node], 0.3, f"{node}={val:.2f}", ha='center')

    plt.tight_layout()
    return fig
def plot_backprop_dnn(x, w1, w2, t):
    """Draw a two-weight linear chain x -> a=w1*x -> y=w2*a -> L=0.5(y-t)^2,
    annotated with the local partials and the backprop weight gradients.

    Args:
        x: network input.
        w1: first weight.
        w2: second weight.
        t: target value used by the squared-error loss.

    Returns:
        The matplotlib figure with the annotated graph.
    """
    a = w1 * x
    y = w2 * a
    L = 0.5 * (y - t) ** 2

    fig, ax = plt.subplots(figsize=(6, 2))
    ax.axis('off')
    xpos = {'x': 0.1, 'a': 0.3, 'y': 0.5, 'L': 0.7}

    # Nodes: open circles with the variable name centered inside.
    for node, cx in xpos.items():
        ax.add_patch(plt.Circle((cx, 0.5), 0.05, fill=False))
        ax.text(cx, 0.5, node, ha='center', va='center')

    # Edges: forward arrows labeled with the local partial derivative.
    edges = [
        ('x', 'a', r'$\partial a/\partial x=w_1$'),
        ('a', 'y', r'$\partial y/\partial a=w_2$'),
        ('y', 'L', r'$\partial L/\partial y=(y-t)$'),
    ]
    for src, dst, label in edges:
        x_src, x_dst = xpos[src], xpos[dst]
        ax.annotate('', xy=(x_dst, 0.5), xytext=(x_src, 0.5),
                    arrowprops=dict(arrowstyle='->'))
        ax.text((x_src + x_dst) / 2, 0.6, label, ha='center', va='center')

    # Weight gradients obtained by multiplying local partials backwards.
    ax.text(0.02, 0.15, r'$\partial L/\partial w_2=(y-t)\cdot a$', transform=ax.transAxes, ha='left')
    ax.text(0.02, 0.02, r'$\partial L/\partial w_1=(y-t)\cdot w_2\cdot x$', transform=ax.transAxes, ha='left')

    # Current numeric value of each node, just below its circle.
    for node, val in (('x', x), ('a', a), ('y', y), ('L', L)):
        ax.text(xpos[node], 0.3, f"{node}={val:.2f}", ha='center')

    plt.tight_layout()
    return fig
def update_secant(h):
    """Refresh the secant plot and its Markdown slope readout.

    The difference quotient (f(0+h) - f(0)) / h equals h algebraically, so the
    slope is computed without the division that raised ZeroDivisionError at
    h == 0 in the original one-liner. Output is unchanged for every h != 0.

    Args:
        h: current value of the h slider.

    Returns:
        (figure, markdown_string) pair for the Plot and Markdown components.
    """
    slope = h  # (h**2) / h simplified; defined even at h == 0
    return plot_secant(h), f'**Δx=h={h:.4f}**, (f(x+h)-f(x))/h={slope:.4f}'
def update_tangent(x0):
    """Refresh the tangent plot and its Markdown slope readout for slider value x0."""
    slope = 2 * x0
    return plot_tangent(x0), f'**x={x0:.2f}**, dy/dx={slope:.2f}'
def update_gd(lr, init_x, steps):
    """Refresh the gradient-descent plot and the Markdown parameter summary."""
    caption = f'lr={lr:.2f}, init={init_x:.2f}, steps={int(steps)}'
    return plot_gradient_descent(lr, init_x, steps), caption
def update_chain(x):
    """Refresh the chain-rule diagram and a Markdown walkthrough of the values.

    Args:
        x: current value of the x slider.

    Returns:
        (figure, markdown_string) pair for the Plot and Markdown components.
    """
    y = 2 * x
    z = 3 * y
    L = 4 * z
    msg = (f"**Current values:**\n"
           f"- y = 2·{x:.2f} = {y:.2f}\n"
           f"- z = 3·{y:.2f} = {z:.2f}\n"
           f"- L = 4·{z:.2f} = {L:.2f}\n\n"
           "**Chain Rule:** dL/dx = 4 × 3 × 2 = 24")
    return plot_chain_network(x), msg
def update_bp(x, w1, w2, t):
    """Refresh the backpropagation diagram; the Markdown pane is left empty."""
    fig = plot_backprop_dnn(x, w1, w2, t)
    return fig, ''
def load_secant():
    """Initial content for the Secant tab (default h = 0.01)."""
    return plot_secant(0.01), "**Hint:** try moving the slider!"


def load_tangent():
    """Initial content for the Tangent tab (default x = 0.0)."""
    return plot_tangent(0.0), "**Hint:** try moving the slider!"


def load_gd():
    """Initial content for the Gradient Descent tab (lr=0.1, x0=1.0, 10 steps)."""
    return plot_gradient_descent(0.1, 1.0, 10), "**Hint:** try moving the sliders!"


def load_chain():
    """Initial content for the Chain Rule tab (default x = 1.0)."""
    return plot_chain_network(1.0), "**Hint:** try moving the slider!"


def load_bp():
    """Initial content for the Backpropagation tab (x=0.5, w1=w2=1.0, t=0.0)."""
    return plot_backprop_dnn(0.5, 1.0, 1.0, 0.0), "**Hint:** try moving the sliders!"
def reset_all():
    """Return gr.update payloads restoring every slider to its default value.

    Order matches the outputs list wired to the reset button:
    [h, x0, lr, init, st, x_s, xb, w1b, w2b, tb].
    """
    defaults = (0.01, 0.0, 0.1, 1.0, 10, 1.0, 0.5, 1.0, 1.0, 0.0)
    return tuple(gr.update(value=v) for v in defaults)
# ---- UI wiring: five tabs, per-tab load handlers, and a global reset -------
with gr.Blocks() as demo:
    with gr.Tabs():
        # Tab 1: secant slope explorer.
        with gr.TabItem("Secant Approximation"):
            gr.HTML("<p><strong>Secant Approximation</strong> <span style='cursor:help' title='Approximates derivative via (f(x+h)-f(x))/h'></span></p>")
            with gr.Row():
                with gr.Column(scale=3):
                    h = gr.Slider(0.001, 1.0, value=0.01, step=0.001, label="h")
                    p1, m1 = gr.Plot(), gr.Markdown()
                    h.change(update_secant, [h], [p1, m1])
                with gr.Column(scale=1):
                    gr.HTML("<p><strong>Key Question:</strong><br>What does the secant slope approximate?<br><span style='cursor:help' title='The instantaneous rate of change (derivative).'>❔</span></p>")

        # Tab 2: tangent line at a movable point.
        with gr.TabItem("Tangent Visualization"):
            gr.HTML("<p><strong>Tangent Visualization</strong> <span style='cursor:help' title='Shows tangent line at x and slope dy/dx'></span></p>")
            with gr.Row():
                with gr.Column(scale=3):
                    x0 = gr.Slider(-1.0, 2.0, value=0.0, step=0.1, label="x")
                    p2, m2 = gr.Plot(), gr.Markdown()
                    x0.change(update_tangent, [x0], [p2, m2])
                with gr.Column(scale=1):
                    gr.HTML("<p><strong>Key Question:</strong><br>What does the tangent line represent?<br><span style='cursor:help' title='The instantaneous rate of change at the point.'>❔</span></p>")

        # Tab 3: gradient descent on x^2; any of the three sliders re-renders.
        with gr.TabItem("Gradient Descent"):
            gr.HTML("<p><strong>Gradient Descent</strong> <span style='cursor:help' title='Shows gradient descent steps on x^2 curve'></span></p>")
            with gr.Row():
                with gr.Column(scale=3):
                    lr = gr.Slider(0.01, 0.5, value=0.1, step=0.01, label="Learning Rate")
                    init = gr.Slider(-2.0, 2.0, value=1.0, step=0.1, label="Initial x")
                    st = gr.Slider(1, 50, value=10, step=1, label="Iterations")
                    pg, mg = gr.Plot(), gr.Markdown()
                    for ctrl in (lr, init, st):
                        ctrl.change(update_gd, [lr, init, st], [pg, mg])
                with gr.Column(scale=1):
                    gr.HTML("<p><strong>Key Question:</strong><br>How does gradient descent move?<br><span style='cursor:help' title='It moves opposite to the gradient towards the function minimum.'>❔</span></p>")

        # Tab 4: chain rule computation graph.
        with gr.TabItem("Chain Rule"):
            gr.HTML("<p><strong>Chain Rule</strong> <span style='cursor:help' title='Visualizes chain rule in computation graph'></span></p>")
            with gr.Row():
                with gr.Column(scale=3):
                    x_s = gr.Slider(0.0, 2.0, value=1.0, step=0.1, label="x")
                    cp, cm = gr.Plot(), gr.Markdown()
                    x_s.change(update_chain, [x_s], [cp, cm])
                with gr.Column(scale=1):
                    gr.HTML("<p><strong>Key Question:</strong><br>How is dL/dx computed?<br><span style='cursor:help' title='By multiplying partial derivatives along graph: 4×3×2.'>❔</span></p>")

        # Tab 5: backprop through a two-weight linear chain.
        with gr.TabItem("Backpropagation"):
            gr.HTML("<p><strong>Backpropagation</strong> <span style='cursor:help' title='Visualizes backprop in a simple DNN'></span></p>")
            with gr.Row():
                with gr.Column(scale=3):
                    xb = gr.Slider(-2.0, 2.0, value=0.5, step=0.1, label="x")
                    w1b = gr.Slider(-2.0, 2.0, value=1.0, step=0.1, label="w1")
                    w2b = gr.Slider(-2.0, 2.0, value=1.0, step=0.1, label="w2")
                    tb = gr.Slider(-2.0, 2.0, value=0.0, step=0.1, label="t")
                    pb, mb = gr.Plot(), gr.Markdown()
                    for ctrl in (xb, w1b, w2b, tb):
                        ctrl.change(update_bp, [xb, w1b, w2b, tb], [pb, mb])

    # Populate every tab with its default figure on page load.
    demo.load(load_secant, [], [p1, m1])
    demo.load(load_tangent, [], [p2, m2])
    demo.load(load_gd, [], [pg, mg])
    demo.load(load_chain, [], [cp, cm])
    demo.load(load_bp, [], [pb, mb])

    # Global reset row; output order must match reset_all()'s return order.
    with gr.Row():
        reset_btn = gr.Button("Reset to default settings")
        gr.HTML("<span style='cursor:help' title='Reset all sliders to defaults.'></span>")
    reset_btn.click(reset_all, [], [h, x0, lr, init, st, x_s, xb, w1b, w2b, tb])

demo.launch()