import numpy as np
from scipy.spatial.distance import cdist
import warnings
warnings.filterwarnings("ignore")
from sklearn.cluster import k_means
from sklearn.preprocessing import MinMaxScaler
import pandas as pd

# Global registry mapping keys of the form "GB[attr,i]" to the set of point
# labels ("x1", "x2", ...) that fall into granular ball i of attribute attr.
# Populated by GBi_number() and consumed by compute_membership_matrix().
GBi_dict = {}

def GBi_number(idx1, point_to_gb, attr_name=None):
    """Record in the global GBi_dict which points belong to each granular ball.

    For every distinct label in ``point_to_gb`` a key "GB[attr,i]" (1-based
    ball index i) is stored, whose value is the set of point labels "x{j+1}"
    for each data row j assigned to that ball.  When ``attr_name`` is omitted
    it defaults to "a{idx1+1}".
    """
    global GBi_dict
    attr_name = attr_name if attr_name is not None else f"a{idx1 + 1}"
    for ball_no, label in enumerate(np.unique(point_to_gb), start=1):
        members = np.flatnonzero(point_to_gb == label)
        GBi_dict[f"GB[{attr_name},{ball_no}]"] = {f"x{j + 1}" for j in members}
    # Only log the stored partition
    print(f"属性 {attr_name} 的 point_to_gb 已存储: {point_to_gb}")

def calculate_center_and_radius(gb):
    """Return (center, radius) of a granular ball.

    center is the column-wise mean of the points in ``gb``; radius is the
    largest Euclidean distance from any point to that center.
    """
    centroid = np.mean(gb, axis=0)
    distances = np.linalg.norm(gb - centroid, axis=1)
    return centroid, distances.max()

def splits(gb_list, num, k=2):
    """Run one refinement pass over a list of granular balls.

    Balls with fewer than ``num`` points are kept as-is; larger balls are
    split into up to ``k`` sub-balls via splits_ball().  Returns the new,
    possibly longer, list of balls.
    """
    refined = []
    for ball in gb_list:
        keep_whole = ball.shape[0] < num
        refined += [ball] if keep_whole else splits_ball(ball, k)
    return refined

def splits_ball(gb, k):
    """Partition ball ``gb`` into ``k`` clusters with (seeded) k-means.

    ``k`` is capped at the number of distinct rows so k-means never gets more
    clusters than unique points.  Returns one sub-array per cluster label.
    """
    distinct_rows = np.unique(gb, axis=0).shape[0]
    k = min(k, distinct_rows)
    _, labels, _ = k_means(X=gb, n_clusters=k, n_init=1, random_state=8)
    return [gb[labels == cluster] for cluster in range(k)]

def assign_points_to_closest_gb(data, gb_centers):
    """Assign every sample to the index of its nearest granular-ball center.

    Parameters
    ----------
    data : (n, m) ndarray of samples.
    gb_centers : (g, m) ndarray of ball centers.

    Returns
    -------
    (n,) int ndarray: for each sample, the index into ``gb_centers`` of the
    closest center by Euclidean distance (ties resolved to the lowest index,
    exactly as np.argmin does).
    """
    # Vectorized form of the original per-sample Python loop: build one
    # (n, g) Euclidean distance matrix, then take a row-wise argmin.
    return np.argmin(cdist(data, gb_centers), axis=1).astype(int)

def fuzzy_similarity(t_data, sigma=0, k=2):
    """Build granular balls over ``t_data`` and derive a fuzzy similarity
    matrix between points from the distances between their ball centers.

    Returns (point_to_gb, sim): the per-point ball index array and the
    n-by-n similarity matrix with entries below ``sigma`` zeroed out.
    """
    n_rows, n_cols = t_data.shape
    split_threshold = np.ceil(n_rows ** 0.5)

    # Repeatedly refine until a pass produces no new balls.
    balls = [t_data]
    while True:
        before = len(balls)
        balls = splits(balls, num=split_threshold, k=k)
        if len(balls) == before:
            break

    centers = np.zeros((len(balls), n_cols))
    for i, ball in enumerate(balls):
        centers[i], _ = calculate_center_and_radius(ball)

    point_to_gb = assign_points_to_closest_gb(t_data, centers)
    own_centers = centers[point_to_gb]
    # NOTE(review): the divisor 3 is hard-coded; an earlier variant divided
    # by the attribute count t_m instead — confirm the intended normalization.
    sim = 1 - cdist(own_centers, own_centers) / 3
    sim[sim < sigma] = 0
    return point_to_gb, sim

def compute_membership_matrix(GBi_dict, attr):
    """Build a crisp 0/1 membership matrix for one attribute's balls.

    Row i corresponds to key "GB[attr,i+1]" in ``GBi_dict``; column j is 1
    iff point label "x{j+1}" belongs to that ball.  Columns run up to the
    highest point index seen.  Returns an empty (0, 0) array when the
    attribute has no recorded balls.
    """
    prefix = f"GB[{attr},"
    ball_keys = [key for key in GBi_dict if key.startswith(prefix)]
    if not ball_keys:
        return np.empty((0, 0))
    # Order rows by the ball number embedded in the key.
    ball_keys.sort(key=lambda key: int(key.split(",")[1].rstrip("]")))
    n_points = max(int(label[1:])
                   for key in ball_keys
                   for label in GBi_dict[key])
    membership = np.zeros((len(ball_keys), n_points), dtype=int)
    for row, key in enumerate(ball_keys):
        for label in GBi_dict[key]:
            membership[row, int(label[1:]) - 1] = 1
    return membership

def _pos_dependency(sim, membership_all, n):
    """Positive-region sum and dependency degree of one similarity matrix
    w.r.t. the decision membership matrix (one row per decision ball)."""
    rel = 1 - sim
    pos = 0.0
    for b in range(membership_all.shape[0]):
        # Lower approximation of decision ball b for every point.
        low = np.maximum(rel, membership_all[b, :]).min(axis=1)
        pos += low.sum()
    return pos, pos / n


def GBFRD(data, sigma=0):
    """Granular-ball fuzzy-rough attribute selection.

    Computes the dependency degree of each single attribute against the
    all-attribute partition, picks the attribute with the highest degree as
    the base, then evaluates every base+other pair and reports the best
    combination.  Prints progress as it goes; always returns 0.
    """
    n, m = data.shape
    LA = np.arange(m)

    # Decision partition from all attributes together.
    all_attr = "".join(f"a{j+1}" for j in LA)
    pt2gb_all, _ = fuzzy_similarity(data[:, LA], sigma, k=2)
    GBi_number(0, pt2gb_all, all_attr)
    membership_all = compute_membership_matrix(GBi_dict, all_attr)

    # Dependency degree of each single attribute.
    single_R = []
    for idx in LA:
        attr = f"a{idx+1}"
        pt2gb, sim = fuzzy_similarity(data[:, [idx]], sigma, k=2)
        GBi_number(idx, pt2gb, attr)
        POS, R = _pos_dependency(sim, membership_all, n)
        print(f"{attr} 的 POS = {POS:.4f}, 依赖度 R = {R:.4f}")
        single_R.append((idx, R))

    base_idx, base_R = max(single_R, key=lambda x: x[1])
    base_attr = f"a{base_idx+1}"
    print(f"\n选取基底属性: {base_attr} (R = {base_R:.4f})")

    # Pair the base attribute with each remaining attribute.
    best_combo = (None, -1.0)
    for idx in LA:
        if idx == base_idx:
            continue
        combo = [base_idx, idx]
        name = f"a{base_idx+1}a{idx+1}"

        pt2gb_c, sim_c = fuzzy_similarity(data[:, combo], sigma, k=2)
        GBi_number(base_idx, pt2gb_c, name)
        POS_c, R_c = _pos_dependency(sim_c, membership_all, n)
        print(f"组合 {name} 的 POS = {POS_c:.4f}, 依赖度 R = {R_c:.4f}")

        if R_c > best_combo[1]:
            best_combo = (idx, R_c)

    chosen_idx, chosen_R = best_combo
    # Guard: with m == 1 there is no attribute to pair with and chosen_idx
    # stays None (the original crashed on chosen_idx + 1 here).
    if chosen_idx is not None:
        print(f"\n最优组合: {base_attr} + a{chosen_idx+1}, 依赖度 R = {chosen_R:.4f}")

    return 0


if __name__ == "__main__":
    # Load the dataset as a plain ndarray (CSV is expected to be numeric).
    data = pd.read_csv("Code/guiyihua_test.csv").values
    # Re-scale only the columns where every value is >= 1 AND the column is
    # not constant.  NOTE(review): the ">= 1" criterion presumably selects
    # columns still on a raw (un-normalized) scale — confirm against the
    # CSV's preprocessing; constant columns are skipped to avoid a 0 range.
    mask = (data >= 1).all(axis=0) & (data.max(axis=0) != data.min(axis=0))
    if mask.any():
        data[:, mask] = MinMaxScaler().fit_transform(data[:, mask])

    # Similarity cutoff used throughout the dependency computation.
    sigma = 0.5
    GBFRD(data, sigma)
