import redis
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_auc_score, average_precision_score
import time
import os

# --- Configuration ---
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
GRAPH_NAME = 'TWeibo'    # graph key queried via GRAPH.QUERY below
BATCH_SIZE = 5_000_000   # edge pairs scored per batch in batched_cosine()
EMB_DIM = 128            # expected embedding dimensionality per node
FP16 = True              # store the embedding matrix as float16 (halves memory)
EDGE_CHUNK = 500_000     # width of the node-id range per edge-scan query

# Shared pooled Redis connection used by every GRAPH.QUERY call in this module.
pool = redis.ConnectionPool(host=REDIS_HOST, port=REDIS_PORT)
r = redis.Redis(connection_pool=pool)

def load_data():
    """Load node embeddings and edges from the graph, chunked for scale.

    Returns:
        emb_matrix: (num_nodes, EMB_DIM) float16/float32 array of embeddings.
        id_map: dict mapping raw graph node id -> row index in emb_matrix.
        pos_edges: (E, 2) int64 array of unique, sorted (undirected) edges
            between nodes that have embeddings; self-loops removed.

    Raises:
        ValueError: if no node with an embedding property exists.
    """
    q_nodes = "MATCH (n) WHERE EXISTS(n.`embedding:STRING`) RETURN id(n)"
    raw = r.execute_command("GRAPH.QUERY", GRAPH_NAME, q_nodes)[1]
    node_ids = [int(row[0]) for row in raw if row and row[0] is not None]
    if not node_ids:
        raise ValueError("No nodes with embeddings found")
    valid_set = set(node_ids)

    # Build the id -> row-index map ONCE. The previous version called
    # node_ids.index(nid) inside the row loop: O(n) per lookup, quadratic
    # overall on millions of nodes. This map is also the returned id_map.
    id_map = {nid: idx for idx, nid in enumerate(node_ids)}

    dtype = np.float16 if FP16 else np.float32
    emb_matrix = np.zeros((len(node_ids), EMB_DIM), dtype=dtype)

    CHUNK = 100_000  # node ids per embedding-fetch query
    for i in range(0, len(node_ids), CHUNK):
        chunk_ids = node_ids[i:i + CHUNK]
        q_emb = (
            f"MATCH (n) WHERE id(n) IN {chunk_ids} "
            f"RETURN id(n), n.`embedding:STRING`"
        )
        rows = r.execute_command("GRAPH.QUERY", GRAPH_NAME, q_emb)[1]
        for nid, emb_str in rows:
            idx = id_map[int(nid)]
            if emb_str:
                # redis-py returns bytes unless decode_responses=True.
                if isinstance(emb_str, bytes):
                    emb_str = emb_str.decode('utf-8')
                # np.fromstring(text, sep=...) is deprecated; parse the
                # comma-separated payload with np.fromiter instead.
                arr = np.fromiter(
                    (float(tok) for tok in emb_str.strip('[]').split(',')
                     if tok.strip()),
                    dtype=dtype,
                )
                if arr.shape[0] != EMB_DIM:
                    # Truncate or zero-pad. np.resize would silently REPEAT
                    # values cyclically when the vector is too short.
                    fixed = np.zeros(EMB_DIM, dtype=dtype)
                    k = min(arr.shape[0], EMB_DIM)
                    fixed[:k] = arr[:k]
                    arr = fixed
                emb_matrix[idx] = arr
            else:
                # Missing embedding: small random vector so cosine is defined.
                emb_matrix[idx] = np.random.normal(0, 0.01, EMB_DIM)

    # Scan edges in node-id ranges to bound per-query result size.
    q_range = "MATCH (n) RETURN min(id(n)), max(id(n))"
    lo_id, hi_id = r.execute_command("GRAPH.QUERY", GRAPH_NAME, q_range)[1][0]
    lo_id, hi_id = int(lo_id), int(hi_id)

    pos_edges = []
    for lo in range(lo_id, hi_id + 1, EDGE_CHUNK):
        hi = lo + EDGE_CHUNK
        q_edge = (
            f"MATCH (a)-[]->(b) "
            f"WHERE id(a) >= {lo} AND id(a) < {hi} "
            f"RETURN id(a), id(b)"
        )
        batch = r.execute_command("GRAPH.QUERY", GRAPH_NAME, q_edge)[1]
        for src, dst in batch:
            src, dst = int(src), int(dst)
            # Keep only edges between embedded nodes; drop self-loops.
            if src in valid_set and dst in valid_set and src != dst:
                pos_edges.append((src, dst))
        print(f"  chunk [{lo}–{hi}) → collected {len(pos_edges)} edges so far", end='\r')

    # reshape(-1, 2) keeps the array 2-D even when no edges were collected,
    # so the axis=1 sort below cannot fail on empty input.
    pos_edges = np.array(pos_edges, dtype=np.int64).reshape(-1, 2)
    # Sort endpoints per row and dedupe: treats the graph as undirected.
    pos_edges = np.unique(np.sort(pos_edges, axis=1), axis=0)

    return emb_matrix, id_map, pos_edges

def generate_negatives(pos_edges, all_nodes, neg_ratio=1):
    """Vectorized negative-edge sampling.

    Draws 3x as many candidate pairs as needed, sorts each pair's endpoints
    (to match the undirected, sorted representation of pos_edges), then
    filters out known positives and self-loops.

    Args:
        pos_edges: (E, 2) int array of sorted positive edges.
        all_nodes: 1-D int array of node ids to sample endpoints from.
        neg_ratio: negatives per positive.

    Returns:
        (K, 2) int array with K <= len(pos_edges) * neg_ratio; may contain
        fewer rows if the 3x oversample does not survive the filters.
    """
    pos_set = set(map(tuple, pos_edges))
    num_neg = int(len(pos_edges) * neg_ratio)

    # Oversample so enough candidates survive the filtering below.
    cand = np.unique(
        np.sort(np.random.choice(all_nodes, size=(num_neg * 3, 2)), axis=1),
        axis=0,
    )
    # Drop candidates that are actual edges, then drop self-loops.
    not_positive = np.fromiter(
        (tuple(p) not in pos_set for p in cand), dtype=bool, count=len(cand)
    )
    cand = cand[not_positive]
    cand = cand[cand[:, 0] != cand[:, 1]]
    return cand[:num_neg]

def batched_cosine(emb_matrix, edges):
    """Cosine similarity of edge endpoint embeddings, computed in batches.

    Batching (BATCH_SIZE rows at a time) bounds the float32 working set
    when emb_matrix is stored as float16.

    Args:
        emb_matrix: (N, D) embedding matrix (any float dtype).
        edges: (E, 2) integer array of ROW INDICES into emb_matrix
            (not raw graph ids).

    Returns:
        (E,) float32 array of cosine similarities.
    """
    src_idx, dst_idx = edges[:, 0], edges[:, 1]
    scores = np.empty(len(edges), dtype=np.float32)

    for start in range(0, len(edges), BATCH_SIZE):
        sl = slice(start, min(start + BATCH_SIZE, len(edges)))
        # Upcast per batch: float16 accumulation would lose precision.
        a = emb_matrix[src_idx[sl]].astype(np.float32)
        b = emb_matrix[dst_idx[sl]].astype(np.float32)
        denom = np.linalg.norm(a, axis=1) * np.linalg.norm(b, axis=1)
        denom[denom == 0] = 1e-8  # guard zero vectors against div-by-zero
        scores[sl] = np.sum(a * b, axis=1) / denom

    return scores

def main():
    """Link-prediction evaluation pipeline.

    Loads embeddings and positive edges, samples negatives, scores all
    pairs with cosine similarity, and reports AUC / average precision.
    """
    # NOTE(review): NumPy does not read this variable — the line is a no-op
    # kept from the original; confirm nothing external relies on it, then delete.
    os.environ["NPY_USE_GPU_IF_AVAILABLE"] = "0"

    emb_matrix, id_map, pos_edges = load_data()
    all_nodes = np.array(list(id_map.keys()), dtype=np.int64)

    # Map raw graph ids -> embedding row indices. otypes is required:
    # without it np.vectorize raises on empty input and infers the output
    # dtype from the first call (risking object arrays).
    to_idx = np.vectorize(id_map.get, otypes=[np.int64])

    pos_idx = to_idx(pos_edges)
    neg_edges = generate_negatives(pos_edges, all_nodes)
    neg_idx = to_idx(neg_edges)

    X = np.vstack([pos_idx, neg_idx])
    y = np.hstack([np.ones(len(pos_idx)), np.zeros(len(neg_idx))])
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, random_state=42
    )

    train_scores = batched_cosine(emb_matrix, X_train)
    test_scores = batched_cosine(emb_matrix, X_test)

    print(f"Train AUC: {roc_auc_score(y_train, train_scores):.4f}")
    print(f"Test  AUC: {roc_auc_score(y_test, test_scores):.4f}")
    print(f"Test  AP : {average_precision_score(y_test, test_scores):.4f}")

if __name__ == "__main__":
    main()
