import torch
import torch.nn as nn
import numpy as np


class DecisionTree(nn.Module):
    """A CART-style decision-tree classifier trained with Gini impurity.

    The split search is exhaustive: every unique value of every feature is
    tried as a ``<=`` threshold, and the candidate with the lowest weighted
    Gini impurity wins (ties keep the first candidate found).

    NOTE(review): inheriting from ``nn.Module`` is kept for backward
    compatibility with existing callers, although no torch functionality
    is actually used by this class.
    """

    def __init__(self, max_depth=3):
        """
        Args:
            max_depth (int): maximum tree depth; a node at this depth (or
                deeper) becomes a majority-vote leaf regardless of purity.
        """
        super().__init__()
        self.max_depth = max_depth
        # Set by fit(); initialized here so predict() before fit() fails
        # with a clear TypeError/KeyError path instead of AttributeError.
        self.tree = None

    def fit(self, X, y):
        """Grow the tree from training data.

        Args:
            X (np.ndarray): feature matrix of shape (n_samples, n_features).
            y (np.ndarray): integer class labels of shape (n_samples,).
                Labels must be non-negative because leaves use ``np.bincount``.
        """
        self.tree = self._build_tree(X, y, depth=0)

    def _build_tree(self, X, y, depth):
        """Recursively build a subtree; returns a nested dict.

        Leaf nodes are ``{"label": majority_class}``; internal nodes hold
        ``feature``, ``threshold``, ``left`` and ``right`` sub-dicts.
        """
        # Stop when the node is pure or the depth budget is exhausted.
        if np.unique(y).size == 1 or depth >= self.max_depth:
            return {"label": int(np.bincount(y).argmax())}

        best_split = self._best_split(X, y)
        # No threshold separates the samples -> majority-vote leaf.
        if best_split is None:
            return {"label": int(np.bincount(y).argmax())}

        left_tree = self._build_tree(X[best_split['left']], y[best_split['left']], depth + 1)
        right_tree = self._build_tree(X[best_split['right']], y[best_split['right']], depth + 1)

        return {"feature": best_split['feature'],
                "threshold": best_split['threshold'],
                "left": left_tree,
                "right": right_tree}

    @staticmethod
    def _gini(labels):
        """Gini impurity ``1 - sum(p_c ** 2)`` of a non-empty label array."""
        _, counts = np.unique(labels, return_counts=True)
        proportions = counts / labels.size
        return 1.0 - np.sum(proportions ** 2)

    def _best_split(self, X, y):
        """Exhaustively search all (feature, threshold) candidates.

        Returns:
            dict | None: the split with the lowest weighted Gini impurity
            (keys ``feature``, ``threshold``, ``left``/``right`` boolean
            masks), or None when no candidate produces two non-empty sides.
        """
        best_split = None
        best_gini = float('inf')

        n_samples, n_features = X.shape
        for feature in range(n_features):
            for threshold in np.unique(X[:, feature]):
                left_mask = X[:, feature] <= threshold
                right_mask = ~left_mask

                n_left = int(left_mask.sum())
                n_right = n_samples - n_left
                # Skip degenerate splits that put everything on one side
                # (always true for the largest unique value of a feature).
                if n_left == 0 or n_right == 0:
                    continue

                gini = (n_left * self._gini(y[left_mask])
                        + n_right * self._gini(y[right_mask])) / n_samples

                # Strict '<' keeps the first-found split on ties, matching
                # the original search order (feature-major, threshold-minor).
                if gini < best_gini:
                    best_gini = gini
                    best_split = {"feature": feature, "threshold": threshold,
                                  "left": left_mask, "right": right_mask}

        return best_split

    def predict(self, X):
        """Return a list with the predicted class label for each row of X."""
        return [self._predict_single(x, self.tree) for x in X]

    def _predict_single(self, x, tree):
        """Walk the tree dict for one sample until a leaf label is reached."""
        if "label" in tree:
            return tree["label"]

        if x[tree["feature"]] <= tree["threshold"]:
            return self._predict_single(x, tree["left"])
        else:
            return self._predict_single(x, tree["right"])


# Usage example: tiny 2-feature, 2-class dataset.
X_train = np.array([[2, 3], [10, 15], [6, 6], [1, 2], [12, 10]])
y_train = np.array([0, 1, 1, 0, 1])

# Create and train the decision tree.
tree = DecisionTree(max_depth=3)
tree.fit(X_train, y_train)

# Predict labels for unseen samples.
X_test = np.array([[7, 7], [3, 4]])
predictions = tree.predict(X_test)

# "预测结果" = "prediction results" (runtime string kept as-is).
print("预测结果:", predictions)
