import torch
import torch.nn as nn
import torch.optim as optim
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_moons
from sklearn.model_selection import train_test_split
import gradio as gr

# Generate a synthetic dataset
X, y = make_moons(n_samples=500, noise=0.2, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

# Convert data to PyTorch tensors
X_train = torch.tensor(X_train, dtype=torch.float32)
y_train = torch.tensor(y_train, dtype=torch.long)
X_test = torch.tensor(X_test, dtype=torch.float32)
y_test = torch.tensor(y_test, dtype=torch.long)

# Define the neural network architecture
class SimpleNN(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(2, 10)
        self.fc2 = nn.Linear(10, 10)
        self.fc3 = nn.Linear(10, 2)
        self.relu = nn.ReLU()

    def forward(self, x):
        x = self.relu(self.fc1(x))
        x = self.relu(self.fc2(x))
        x = self.fc3(x)
        return x

# Function to plot the decision boundary
def plot_decision_boundary(pred_func, X, y):
    x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
    y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    Z = pred_func(np.c_[xx.ravel(), yy.ravel()])
    Z = np.argmax(Z, axis=1).reshape(xx.shape)
    plt.contourf(xx, yy, Z, levels=[-1, 0, 1], colors=['blue', 'red'], alpha=0.3)
    plt.scatter(X[y == 0][:, 0], X[y == 0][:, 1], color='blue', label='Class 0', edgecolor='k')
    plt.scatter(X[y == 1][:, 0], X[y == 1][:, 1], color='red', label='Class 1', edgecolor='k')
    plt.xlabel("Feature 1")
    plt.ylabel("Feature 2")
    plt.title("Decision Boundary and Dataset with Classifier Background")
    plt.legend()

# Gradio function to train the model for the given number of epochs and return
# the decision-boundary plot and test accuracy
def train_and_plot(epochs):
    epochs = int(epochs)  # the slider may pass a float; range() needs an int

    # Initialize the network, loss function, and optimizer
    model = SimpleNN()
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.Adam(model.parameters(), lr=0.01)

    # Training loop (full batch: every epoch uses the entire training set)
    for epoch in range(epochs):
        model.train()
        optimizer.zero_grad()
        outputs = model(X_train)
        loss = criterion(outputs, y_train)
        loss.backward()
        optimizer.step()

    # Plot the final decision boundary after training completes
    fig = plt.figure(figsize=(6, 4))
    plot_decision_boundary(lambda x: model(torch.tensor(x, dtype=torch.float32)).detach().numpy(), X, y)
    plt.title(f"Decision Boundary after {epochs} Epochs")

    # Evaluate the model on the held-out test set
    model.eval()
    with torch.no_grad():
        outputs = model(X_test)
        _, predicted = torch.max(outputs, 1)
        accuracy = (predicted == y_test).float().mean().item() * 100

    return fig, f"Test Accuracy: {accuracy:.2f}%"

# Create Gradio interface
interface = gr.Interface(
    fn=train_and_plot,
    inputs=gr.Slider(20, 200, step=10, label="Number of Epochs"),
    outputs=["plot", "text"],
    title="Neural Network Decision Boundary Visualization",
    description="Choose the number of epochs (between 20 and 200) to train the model and view the decision boundary and test accuracy."
)

# Launch the Gradio app (necessary for Hugging Face deployment)
if __name__ == "__main__":
    interface.launch()
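
# A minimal sketch of the dependencies this Space is assumed to need. Gradio
# apps on Hugging Face Spaces typically list their Python dependencies in a
# requirements.txt alongside app.py; the list below is an assumption based on
# the imports above, not a file taken from this repository:
#
#   torch
#   gradio
#   scikit-learn
#   matplotlib
#   numpy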