import numpy as np
import pandas as pd

def k_fold_cross_validation(X, y, k=5, seed=None):
    """Yield (train_indices, test_indices) pairs for k-fold cross-validation.

    Samples are shuffled once, then split into k folds whose sizes differ by
    at most one: the first ``n_samples % k`` folds receive one extra sample.

    Parameters
    ----------
    X : array-like of length n_samples (only ``len(X)`` is used here)
    y : array-like; unused by the splitter, kept for a scikit-learn-like signature
    k : int, number of folds (default 5)
    seed : int or None, optional
        If given, shuffling uses a locally-seeded ``np.random.default_rng``
        so splits are reproducible. If None (default), the legacy global-state
        ``np.random.shuffle`` is used, matching the original behavior.

    Raises
    ------
    ValueError
        If k is not in ``[2, n_samples]`` (the old code silently produced
        empty test folds when k exceeded the sample count).
    """
    n_samples = len(X)
    if not 2 <= k <= n_samples:
        raise ValueError(
            "k must be between 2 and n_samples={}, got {}".format(n_samples, k)
        )

    indices = np.arange(n_samples)
    if seed is None:
        np.random.shuffle(indices)  # legacy path: mutates global RNG state
    else:
        np.random.default_rng(seed).shuffle(indices)

    # First (n_samples % k) folds absorb one extra sample each.
    fold_sizes = np.full(k, n_samples // k, dtype=int)
    fold_sizes[:n_samples % k] += 1

    current = 0
    for fold_size in fold_sizes:
        start, stop = current, current + fold_size
        test_indices = indices[start:stop]
        # Training set is everything outside the [start, stop) window.
        train_indices = np.concatenate([indices[:start], indices[stop:]])

        yield train_indices, test_indices

        current = stop

# Dataset lives under .../iris/ (hard-coded absolute path; adjust per machine).
data_path = 'C:/Users/董蝶菲/Desktop/工具模块/iris/iris.csv'
df = pd.read_csv(data_path)

# Coerce every column to numeric; any non-numeric cell becomes NaN.
# NOTE(review): if the last column holds string class labels (typical for
# iris.csv), this turns the entire label column into NaN — confirm the file
# actually stores numeric labels.
df = df.apply(pd.to_numeric, errors='coerce')

# Impute remaining NaNs with each column's mean.
# NOTE(review): a column that became all-NaN above has a NaN mean, so it
# stays NaN after this fill — verify downstream before trusting y.
df.fillna(df.mean(), inplace=True)

# Features = all columns but the last; target = the last column.
X = df.iloc[:, :-1].values
y = df.iloc[:, -1].values

# Ensure plain float arrays for the linear-algebra code below.
X = X.astype(float)
y = y.astype(float)

# A minimal ordinary-least-squares linear regression (with intercept), used
# as the example model for the cross-validation demo.
class SimpleLinearRegression:
    """Fit ``y ≈ b0 + b1*x1 + ... + bd*xd`` by least squares.

    After ``fit``, the estimated coefficients are stored in
    ``self.coefficients`` with the intercept first.
    """

    def fit(self, X_train, y_train):
        """Estimate coefficients from training data.

        Uses ``np.linalg.lstsq`` rather than explicitly inverting X^T X:
        it is numerically stabler and still returns a (minimum-norm)
        solution when the design matrix is rank-deficient. The original
        ``inv``-based code printed an error in that case and left
        ``coefficients`` unset, so a later ``predict`` raised
        AttributeError.
        """
        # Prepend a column of ones so the first coefficient is the intercept.
        X_design = np.hstack((np.ones((X_train.shape[0], 1)), X_train))
        self.coefficients, *_ = np.linalg.lstsq(X_design, y_train, rcond=None)

    def predict(self, X_test):
        """Return predictions for X_test; ``fit`` must have been called first."""
        X_design = np.hstack((np.ones((X_test.shape[0], 1)), X_test))
        return X_design.dot(self.coefficients)

model = SimpleLinearRegression()

k = 5  # 5-fold cross-validation
# enumerate(..., start=1) replaces the original hand-maintained fold counter
# (i = 1 ... i += 1), which just re-implemented enumerate.
for fold_num, (train_index, test_index) in enumerate(
        k_fold_cross_validation(X, y, k), start=1):
    print('\nFold {}/{}'.format(fold_num, k))
    X_train, X_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]

    model.fit(X_train, y_train)
    pred_test = model.predict(X_test)

    # Evaluate the held-out fold with mean squared error (lower is better).
    mse = np.mean((pred_test - y_test) ** 2)
    print('Mean Squared Error:', mse)


