# Virtual environment: conda allennlp_env
import json
import logging

from allennlp.predictors.predictor import Predictor
from sqlglot import exp, parse_one


def replace_table_aliases_with_subquery_check(sql):
    """Normalize *sql* by removing plain table aliases.

    Plain table aliases (``FROM users AS u``) are deleted and column
    references that used them (``u.name``) are rewritten to the real
    table name (``users.name``).  Aliases that belong to subqueries are
    left alone, since a derived table generally needs its alias to stay
    valid SQL.

    :param sql: a SQL string parseable by ``sqlglot.parse_one``.
    :returns: the rewritten SQL string.
    """
    parsed = parse_one(sql)

    # alias name -> underlying table name, collected from plain tables only.
    alias_to_table = {}

    for node in parsed.find_all(exp.Table):
        alias = node.args.get("alias")
        if alias:
            alias_name = alias.name
            table_name = node.name
            alias_to_table[alias_name] = table_name
            node.set("alias", None)  # drop the plain table alias

    # If a subquery uses the same alias name, stop rewriting that name:
    # columns qualified with it refer to the subquery, not the base table.
    for node in parsed.find_all(exp.Subquery):
        alias = node.args.get("alias")
        if alias:
            alias_name = alias.name
            if alias_name in alias_to_table:
                del alias_to_table[alias_name]  # keep subquery aliases untouched

    # NOTE(review): this assumes walk() yields Expression nodes directly
    # (sqlglot >= 16); older sqlglot versions yield (node, parent, key)
    # tuples, which would make the isinstance check never match — confirm
    # the pinned sqlglot version.
    for node in parsed.walk():
        if isinstance(node, exp.Column):
            table = node.table
            if table in alias_to_table:
                # NOTE(review): set() is passed a plain str here and relies
                # on sqlglot coercing it to an Identifier — verify.
                node.set("table", alias_to_table[table])

    return parsed.sql()


# from allennlp.models.archival import load_archive
# from contextlib import ExitStack
# import argparse


def get_predictor():
    """Load and return the AllenNLP semantic-role-labeling predictor.

    Bug fix: the original body consisted solely of commented-out lines,
    which left the ``def`` with no statement — a SyntaxError that made
    the whole module unimportable.  Restore the newer (BERT-based) SRL
    model; the older model URL is kept below for reference.

    :returns: an ``allennlp`` ``Predictor`` instance.
    """
    # Older LSTM-based SRL model:
    # return Predictor.from_path("https://s3-us-west-2.amazonaws.com/allennlp/models/srl-model-2020.02.10.tar.gz")
    return Predictor.from_path(
        "https://storage.googleapis.com/allennlp-public-models/structured-prediction-srl-bert.2020.12.15.tar.gz"
    )


def get_srl(predictor, sentence):
    """Run semantic-role labeling on *sentence* via *predictor*.

    Thin convenience wrapper: forwards the sentence to the predictor's
    ``predict`` method and returns its result unchanged.
    """
    return predictor.predict(sentence)


def preprocess(data_path, output_path=""):
    """Preprocess a Spider-style dataset: attach SRL output and a
    normalized, alias-free SQL form to every example.

    :param data_path: path to the input JSON file; each item must have
        ``"question"`` and ``"query"`` keys.
    :param output_path: path the processed JSON is written to.  The
        original hard-coded ``open(f"", "w")`` — a pointless f-string
        around an empty placeholder path that always failed; it is now a
        backward-compatible parameter (TODO: supply a real default).
    """
    # Load the Spider dataset.
    with open(data_path, "r", encoding="utf-8") as f:
        data = json.load(f)
    # Load the SRL model (downloads the archive on first use).
    predictor = get_predictor()

    processed_data = []
    for item in data:
        question = item["question"]
        logging.info(f"Processing question: {question}")
        srl = get_srl(predictor, question)
        sql = item["query"]
        # Strip plain table aliases so downstream parsing sees real names.
        processed_sql = replace_table_aliases_with_subquery_check(sql)
        parsed_sql = parse_one(processed_sql)

        processed_data.append({
            "question": question,
            "srl": srl,
            "sql": sql,
            "processed_sql": processed_sql,
            # repr() keeps the sqlglot AST JSON-serializable.
            "parsed_sql": repr(parsed_sql),
        })
    logging.info(f"Processed {len(processed_data)} items.")
    # Save processed data to a new JSON file.
    with open(output_path, "w", encoding="utf-8") as f:
        json.dump(processed_data, f, indent=4)


if __name__ == "__main__":
    # logging is imported at module level (preprocess() itself logs, so
    # the import must not live only inside this guard — previously calling
    # preprocess() from another module raised NameError).
    # TODO: filename="" is a placeholder — set a real log-file path.
    logging.basicConfig(
        filename="", level=logging.INFO)
    # 1. Preprocess the data.  TODO: "" is a placeholder dataset path.
    preprocess("")
