import numpy as np
from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

class SoftmaxRegression:
    """Multiclass logistic (softmax) regression trained by full-batch gradient descent."""

    def __init__(self, num_classes, num_features, learning_rate=0.1):
        """Set up a zero-initialized linear model.

        Args:
            num_classes: number of output classes.
            num_features: dimensionality of the input vectors.
            learning_rate: gradient-descent step size.
        """
        self.num_classes = num_classes
        self.num_features = num_features
        self.learning_rate = learning_rate
        # Zero init is fine here: the cross-entropy objective for softmax
        # regression is convex, so there is no symmetry to break.
        self.weights = np.zeros((num_features, num_classes))
        self.biases = np.zeros(num_classes)

    def softmax(self, z):
        """Row-wise softmax of logits z, shape (n_samples, num_classes).

        Subtracting the per-row max before exponentiating prevents overflow
        (np.exp of large logits returns inf, which turns the probabilities —
        and then the whole model — into NaN). The shift does not change the
        result because softmax is invariant to adding a constant per row.
        """
        shifted = z - np.max(z, axis=1, keepdims=True)
        exp_z = np.exp(shifted)
        return exp_z / np.sum(exp_z, axis=1, keepdims=True)

    def predict(self, X):
        """Return the most probable class index for each row of X."""
        z = np.dot(X, self.weights) + self.biases
        # argmax of the logits equals argmax of the softmax (monotone map),
        # so the normalization can be skipped.
        return np.argmax(z, axis=1)

    def fit(self, X, y, num_epochs=100):
        """Train with full-batch gradient descent on the cross-entropy loss.

        Args:
            X: (n_samples, num_features) float array.
            y: (n_samples,) integer class labels in [0, num_classes).
            num_epochs: number of full passes over the data.

        Returns:
            List of the mean cross-entropy loss at each epoch (was discarded
            in the original; returning it is backward-compatible).
        """
        n = len(y)
        rows = np.arange(n)
        losses = []
        for _ in range(num_epochs):
            z = np.dot(X, self.weights) + self.biases
            p = self.softmax(z)
            # Small epsilon guards log(0) when a true class gets ~0 probability.
            losses.append(-np.mean(np.log(p[rows, y] + 1e-12)))
            # d(loss)/dz = (p - onehot(y)) / n. Copy before mutating so p is
            # not silently clobbered, and average over the batch (the original
            # summed, which scales the step by n and makes the default
            # learning rate diverge on any non-trivial dataset).
            grad_z = p.copy()
            grad_z[rows, y] -= 1.0
            grad_z /= n
            self.weights -= self.learning_rate * np.dot(X.T, grad_z)
            self.biases -= self.learning_rate * np.sum(grad_z, axis=0)
        return losses

# Demo: train on the scikit-learn handwritten-digits set (8x8 images,
# 64 features, 10 classes) and report held-out accuracy.
digits = load_digits()
features, labels = digits.data, digits.target
X_tr, X_te, y_tr, y_te = train_test_split(
    features, labels, test_size=0.2, random_state=42
)

clf = SoftmaxRegression(num_classes=10, num_features=features.shape[1])
clf.fit(X_tr, y_tr)
print("Accuracy:", accuracy_score(y_te, clf.predict(X_te)))