from matplotlib import pyplot as plt
import pandas as pd
import statsmodels.formula.api as smf
from sqlalchemy import create_engine
import pymysql
import statsmodels.api as sm
import numpy as np

# Database connection settings (MySQL via PyMySQL).
db_config = dict(
    host='localhost',
    user='root',
    password='MYSQL123',
    database='mysql',
    port=3306,
    charset='utf8mb4',
)

# Build a SQLAlchemy engine (for potential to_sql use) and a raw PyMySQL
# connection for pandas reads. Extra keys in db_config (e.g. charset) are
# simply ignored by str.format.
db_url = "mysql+pymysql://{user}:{password}@{host}:{port}/{database}".format(**db_config)
engine = create_engine(db_url)
conn = pymysql.connect(**db_config)
chunk_size = 10000

# Join daily bars with money-flow data; LEFT JOIN the SH composite index so a
# missing index row does not drop the stock row. Streamed in chunks to keep
# memory bounded.
sql = """
    SELECT d.*, m.net_mf_vol, m.sell_elg_vol, m.buy_elg_vol, m.sell_lg_vol, m.buy_lg_vol, i.closes as i_closes, i.vol as i_vol
    FROM date_1 d
    JOIN moneyflows m
    ON d.ts_code = m.ts_code AND d.trade_date = m.trade_date
    LEFT JOIN index_daily i ON d.trade_date = i.trade_date AND i.ts_code='000001.SH'
    WHERE d.trade_date BETWEEN '2023-01-01' AND '2023-12-31' AND d.ts_code='000001.SZ'
    """
chunks = pd.read_sql_query(sql, conn, chunksize=chunk_size)

# Materialize all chunks into one frame.
df1 = pd.concat(chunks, ignore_index=True)

# Fail fast if the query matched nothing.
if df1.empty:
    raise ValueError("查询返回空数据，请检查SQL查询条件和数据库连接")

print("前5行数据预览:")
print(df1.head())

# Derived return/change-rate columns.
# zd_close: the stock's daily return. The previous version was just
# `closes.shift(1)` (yesterday's raw close) and was flagged as wrong in its
# own comment; computed here as a pct-change, consistent with hz_close below.
df1['zd_close'] = round((df1['closes'] - df1['closes'].shift(1)) / df1['closes'].shift(1), 2)
# hz_close: same-day return of the SH composite index.
df1['hz_close'] = round((df1['i_closes'] - df1['i_closes'].shift(1)) / df1['i_closes'].shift(1), 2)
# hz_vol: lagged volume change of the index (t-1 vs t-2).
df1['hz_vol'] = round((df1['i_vol'].shift(1) - df1['i_vol'].shift(2)) / df1['i_vol'].shift(2), 2)

# Drop rows where the target is undefined (leading rows lost to shifting).
df1 = df1.dropna(subset=['zd_close']).reset_index(drop=True)

# Feature matrix for PCA.
x = df1[['amount', 'net_mf_vol', 'sell_elg_vol', 'buy_lg_vol', 'hz_close', 'hz_vol']].copy()

# 1. Treat infinities as missing so they get imputed below.
x = x.replace([np.inf, -np.inf], np.nan)

# 2. Report how much is missing per column.
print("缺失值统计:")
print(x.isnull().sum())

# 3. Mean-impute missing values (pick another strategy if the data warrants it).
x = x.fillna(x.mean())

# 4. Final validation. Explicit raises instead of `assert`, which is silently
#    stripped when Python runs with the -O flag.
if np.any(np.isnan(x.values)):
    raise ValueError("数据仍包含NaN")
if np.any(np.isinf(x.values)):
    raise ValueError("数据仍包含inf")

# 5. Standardize features so PCA is not dominated by large-scale columns.
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
x_scaled = scaler.fit_transform(x)

# PCA via eigendecomposition of the covariance of the standardized data.
try:
    cov_matrix = np.cov(x_scaled, rowvar=False)
    # The covariance matrix is symmetric: use eigh, which returns REAL
    # eigenvalues/vectors (eig can return complex values with tiny imaginary
    # parts) in ascending order.
    eigenvalues, eigenvectors = np.linalg.eigh(cov_matrix)
except Exception as e:
    print("PCA计算失败:", str(e))
    raise

# Sort eigenpairs by descending eigenvalue. Without this, slicing the first
# five entries does NOT select the top components (eig gives no order
# guarantee and eigh is ascending), so the explained-variance ratio and the
# projection below would be wrong.
order = np.argsort(eigenvalues)[::-1]
eigenvalues = eigenvalues[order]
eigenvectors = eigenvectors[:, order]

print('累计贡献率为:', round(eigenvalues[:5].sum()/eigenvalues.sum(), 4)*100, '%')
n_components = 5
top_eigenvectors = eigenvectors[:, :n_components]

# Project the STANDARDIZED data: the eigenvectors were derived from x_scaled,
# so projecting raw x (as the original did) would mix incompatible scales.
principal_components = np.dot(x_scaled, top_eigenvectors)
data_pca = pd.concat([df1, pd.DataFrame(principal_components,
                   columns=[f'PC{i+1}' for i in range(n_components)])], axis=1)

# OLS of the target on all retained principal components.
pc_cols = [f'PC{i+1}' for i in range(n_components)]
X_pca = sm.add_constant(data_pca[pc_cols].copy())
y = df1['zd_close'].copy()

result_pca = sm.OLS(y, X_pca).fit()

print("\n全主成分回归模型结果:")
print(result_pca.summary())

# Second model: a hand-picked subset of components.
X_pca_selected = sm.add_constant(data_pca[['PC1', 'PC3', 'PC4']].copy())

result_pca_selected = sm.OLS(y, X_pca_selected).fit()

print("\n选择主成分回归模型结果:")
print(result_pca_selected.summary())

# Scatter each selected component against the target, side by side.
selected_pcs = ['PC1', 'PC3', 'PC4']
fig, axes = plt.subplots(nrows=1, ncols=3, figsize=(15, 5))

for ax, pc in zip(axes, selected_pcs):
    ax.scatter(X_pca_selected[pc], y, s=50, alpha=0.7)
    ax.set_xlabel(pc)
    ax.set_ylabel('zd_close')
    ax.set_title(f'{pc} vs zd_close')

plt.tight_layout()
plt.show()

# Print each principal component as a linear combination of the features.
# Eigenvectors are the COLUMNS of the matrix returned by np.linalg.eig/eigh
# (v[:, k] pairs with eigenvalue w[k]); the original code indexed ROWS
# (`eigenvectors[k]`), which printed the wrong loadings. Also uses
# n_components instead of a hardcoded 5.
print("\n主成分表达式:")
for k in range(n_components):
    vec = eigenvectors[:, k]
    string_y = f'PC{k+1} = '
    for j, loading in enumerate(vec):
        coef = round(float(loading), 2)
        if j == 0:
            string_y += f'{coef}*X{j+1}'
        elif coef >= 0:
            string_y += f' + {coef}*X{j+1}'
        else:
            string_y += f' - {abs(coef)}*X{j+1}'
    print(string_y)

# Release database resources: close the raw PyMySQL connection, then dispose
# of the SQLAlchemy engine's connection pool.
conn.close()
engine.dispose()