from neo4j import GraphDatabase
import csv
import json
from tqdm import tqdm

def sanitize_label(label):
    """Strip *label* and rewrite characters that are illegal in Neo4j labels."""
    # One-pass translation: "/", " " and ":" become "_", both quote characters
    # are dropped, and "@" is expanded to "AT_".
    table = str.maketrans({"/": "_", " ": "_", "'": "", '"': "", "@": "AT_", ":": "_"})
    return label.strip().translate(table)

def create_index(session):
    """Ensure an index on (:Node).nid exists, creating it if missing.

    Args:
        session: An open neo4j ``Session`` used to run the Cypher statements.
    """
    # SHOW INDEXES replaces the deprecated CALL db.indexes() (Neo4j 4.x+).
    result = session.run("SHOW INDEXES")
    indexes = result.data()
    # .get() guards against index rows (e.g. token-lookup indexes) where these
    # columns are null or absent — the original raised KeyError on them.
    index_exists = any(
        index.get("labelsOrTypes") == ["Node"] and index.get("properties") == ["nid"]
        for index in indexes
    )
    if not index_exists:
        # IF NOT EXISTS makes the create idempotent, so a concurrent creator
        # between the check above and this statement cannot cause a failure.
        cypher = "CREATE INDEX IF NOT EXISTS FOR (n:Node) ON (n.nid)"
        session.run(cypher)
        print("索引创建完成。")
    else:
        print("索引已存在，跳过创建。")

def import_nodes(uri, user, password, batch_insert_file, batch_size=1000):
    """Bulk-import nodes from a CSV file into Neo4j.

    Each CSV row is expected to be ``(labels_str, properties_json)`` where the
    labels are separated by ", " and the JSON object must contain an ``nid``
    field used as the node's id. Malformed rows are logged and skipped.

    Args:
        uri: Bolt URI of the Neo4j server.
        user: Neo4j username.
        password: Neo4j password.
        batch_insert_file: Path to the node CSV file (UTF-8).
        batch_size: Number of nodes written per transaction.
    """
    total_nodes_imported = 0
    # The context manager guarantees the driver's connection pool is closed;
    # the original never closed the driver.
    with GraphDatabase.driver(uri, auth=(user, password)) as driver:
        with driver.session() as session:
            create_index(session)
            with open(batch_insert_file, "r", newline='', encoding='utf-8') as f:
                reader = csv.reader(f)
                # First pass only counts rows so tqdm can display a total.
                total_rows = sum(1 for _ in reader)
                f.seek(0)
                batch = []
                progress_bar = tqdm(reader, total=total_rows, desc="导入节点", unit="行")
                for row_num, row in enumerate(progress_bar, 1):
                    try:
                        labels_str, properties_str = row
                        labels = [sanitize_label(label) for label in labels_str.split(", ")]
                        properties = json.loads(properties_str)

                        # nid is removed from the property map so it can be
                        # SET explicitly by execute_batch.
                        nid = properties.pop("nid", None)
                        if nid is None:
                            raise ValueError("nid字段缺失")

                        batch.append((nid, labels, properties))

                        if len(batch) >= batch_size:
                            execute_batch(session, batch)
                            total_nodes_imported += len(batch)
                            batch = []

                    except Exception as e:
                        # Deliberate best-effort: log the bad row and continue
                        # instead of aborting the whole import.
                        print(f"处理行 {row_num} 时出错: {str(e)}")
                        print(f"原始数据: {row}")
                        continue

                # Flush the final, possibly partial, batch.
                if batch:
                    execute_batch(session, batch)
                    total_nodes_imported += len(batch)
    print(f"节点导入完成。总共导入 {total_nodes_imported} 个节点。")

def execute_batch(session, batch):
    """Create a batch of nodes inside a single explicit transaction.

    Args:
        session: An open neo4j ``Session``.
        batch: Iterable of ``(nid, labels, properties)`` tuples. Labels are
            interpolated into the Cypher text (labels cannot be parameters),
            so they must already be sanitized — see ``sanitize_label``.
    """
    tx = session.begin_transaction()
    try:
        for nid, labels, properties in batch:
            # Node properties and nid go through proper Cypher parameters;
            # only the backtick-quoted labels are string-built.
            cypher = (
                f"CREATE (n:`{'`:`'.join(labels)}`) "
                "SET n = $props, n.nid = $nid"
            )
            tx.run(cypher, props=properties, nid=nid)
        tx.commit()
    finally:
        # close() rolls the transaction back if commit() was never reached,
        # so an exception no longer leaks an open transaction.
        tx.close()

def import_relationships(uri, user, password, batch_relationships_file, batch_size=1000):
    """Bulk-import relationships from a CSV file into Neo4j.

    Each CSV row is expected to be ``(start_id, end_id, rel_type,
    properties_json)``; the ids are parsed as integers and matched against the
    ``nid`` node property. Malformed rows are logged and skipped.

    Args:
        uri: Bolt URI of the Neo4j server.
        user: Neo4j username.
        password: Neo4j password.
        batch_relationships_file: Path to the relationship CSV file (UTF-8).
        batch_size: Number of relationships written per transaction.
    """
    total_relationships_imported = 0
    # The context manager guarantees the driver's connection pool is closed;
    # the original never closed the driver.
    with GraphDatabase.driver(uri, auth=(user, password)) as driver:
        with driver.session() as session:
            with open(batch_relationships_file, "r", newline='', encoding='utf-8') as f:
                reader = csv.reader(f)
                # First pass only counts rows so tqdm can display a total.
                total_rows = sum(1 for _ in reader)
                f.seek(0)
                batch = []
                progress_bar = tqdm(reader, total=total_rows, desc="导入关系", unit="行")
                for row_num, row in enumerate(progress_bar, 1):
                    try:
                        start_id, end_id, rel_type, properties_str = row
                        start_id = int(start_id.strip())
                        end_id = int(end_id.strip())
                        rel_type = sanitize_label(rel_type.strip())
                        properties = json.loads(properties_str)

                        batch.append((start_id, end_id, rel_type, properties))

                        if len(batch) >= batch_size:
                            execute_relationship_batch(session, batch)
                            total_relationships_imported += len(batch)
                            batch = []

                    except Exception as e:
                        # Deliberate best-effort: log the bad row and continue
                        # instead of aborting the whole import.
                        print(f"处理行 {row_num} 时出错: {str(e)}")
                        print(f"原始数据: {row}")
                        continue

                # Flush the final, possibly partial, batch.
                if batch:
                    execute_relationship_batch(session, batch)
                    total_relationships_imported += len(batch)
    print(f"关系导入完成。总共导入 {total_relationships_imported} 条关系。")

def execute_relationship_batch(session, batch):
    """Create a batch of relationships inside a single explicit transaction.

    Args:
        session: An open neo4j ``Session``.
        batch: Iterable of ``(start_id, end_id, rel_type, properties)``
            tuples. ``rel_type`` is interpolated into the Cypher text
            (relationship types cannot be parameters), so it must already be
            sanitized — see ``sanitize_label``.
    """
    tx = session.begin_transaction()
    try:
        for start_id, end_id, rel_type, properties in batch:
            # NOTE(review): this unlabeled MATCH cannot use the :Node(nid)
            # index from create_index(); if all imported nodes carry the Node
            # label, adding ":Node" here would speed up lookups — confirm.
            cypher = (
                "MATCH (a {nid: $start_id}), (b {nid: $end_id}) "
                "CREATE (a)-[r:`" + rel_type + "`]->(b) "
                "SET r = $props"
            )
            tx.run(cypher, start_id=start_id, end_id=end_id, props=properties)
        tx.commit()
    finally:
        # close() rolls the transaction back if commit() was never reached,
        # so an exception no longer leaks an open transaction.
        tx.close()

if __name__ == "__main__":
    neo4j_uri = "bolt://localhost:7687"
    neo4j_user = "neo4j"
    neo4j_password = "88888888"
    batch_insert_file = "file/batchNodes.csv"
    batch_relationships_file = "file/batchRelationships.csv"

    import_nodes(neo4j_uri, neo4j_user, neo4j_password, batch_insert_file)
    import_relationships(neo4j_uri, neo4j_user, neo4j_password, batch_relationships_file)