from control.models import *
from literature.models import *
import pandas as pd
from django.db import IntegrityError, transaction
import json
from tqdm import tqdm
import traceback
from datetime import datetime
from basic_component_related_management.models import *
from dataset.models import *
from collections import defaultdict
import os
import glob

def init_oil_product_component_group():
    """Create the topical sub-database (DataBank) rows and make each one
    visible to every super-admin user.

    Idempotent: existing banks are reused via ``get_or_create``; the
    visible-user set is (re)assigned on every run.
    """
    admin = User.objects.filter(user_account="superadmin").first()
    admin_users = User.objects.filter(user_type="super_admin")
    bank_names = (
        "气化用煤数据子库", "天然气数据子库", "原油评价数据子库", "聚合物产品数据子库",
        "物质结构与物性估算数据子库", "相平衡数据子库", "基础物性数据子库", "炼油产品与石油分子子库",
    )
    for bank_name in bank_names:
        bank, _created = DataBank.objects.get_or_create(
            name=bank_name,
            zh_name=bank_name,
            creator=admin,
            last_editor=admin,
            reviewer=admin,
        )
        bank.visible_users.set(admin_users)

def property_constantProp_mapping():
    """Build the applied-property -> base-property abbreviation mapping.

    Reads the newest "A076B. PCdata基表-4-应用物性表*.xlsx" workbook (the file
    whose trailing "-<suffix>" sorts last is considered the latest) and maps
    each row's ``eAppPropAbbreviation`` to its ``ePropAbbreviation``.
    Later rows overwrite earlier ones on duplicate keys, matching the
    original loop's behaviour.

    Returns:
        dict[str, str]: eAppPropAbbreviation -> ePropAbbreviation.

    Raises:
        FileNotFoundError: when no matching workbook exists (the previous
            version passed None to ``pd.read_excel`` and crashed cryptically).
    """
    matched_files = glob.glob(
        "../final_basic_tables/A076B. PCdata基表-4-应用物性表*.xlsx")
    if not matched_files:
        raise FileNotFoundError(
            "no file matching 'A076B. PCdata基表-4-应用物性表*.xlsx' found")
    # The trailing "-<suffix>" is treated as a version tag.
    latest_file = max(matched_files, key=lambda path: path.split("-")[-1])
    df = pd.read_excel(latest_file, sheet_name="推荐物性表（最终版）（全）",
                       skiprows=[0, 2], header=0, dtype=str)
    # Keep the first 28 columns; blank cells become "".
    selected_data = df.iloc[:, :28].fillna("")
    return dict(zip(selected_data["eAppPropAbbreviation"],
                    selected_data["ePropAbbreviation"]))


def init_dippr_const_data():
    """Import DIPPR constant-property rows from Excel as ExpDataset records.

    Workflow: preload lookup maps (components by DIPPR id, properties,
    phases, units, literature), read "3.Const Properties Details*.xlsx",
    and for each row build an ExpDataset plus its component/property/phase
    links and a single data point, flushing to the database in batches of
    ``batch_size`` via ``_process_batch``.  Rows whose component or property
    cannot be resolved are skipped with a console message; any other per-row
    exception is printed and the loop continues.
    """
    # Size of each bulk-insert batch.
    batch_size = 1000  # rows flushed per call to _process_batch

    # Preload everything needed for row resolution into memory.
    print("预加载必要数据...")
    super_admin_entity = User.objects.filter(user_account="superadmin").first()

    # Attribute that stores a component's DIPPR id.
    dippr_attribute_entity = Attribute.objects.filter(
        name_en="nDipprID").first()
    if dippr_attribute_entity is None:
        print("DIPPR属性不存在")
        return

    # Default unit used when a row carries no unit id.
    unit_1_entity = Unit.objects.filter(sei_id="U-0-1").first()
    if unit_1_entity is None:
        print("默认单位不存在")
        return

    # Fallback data-presentation type; created lazily if missing.
    data_present_unknow_entity = DataPresentWay.objects.filter(
        name="Unknown").first()
    if data_present_unknow_entity is None:
        data_present_unknow_entity = DataPresentWay.objects.create(
            name="Unknown", creator=super_admin_entity, last_editor=super_admin_entity
        )

    # Experiment purpose attached to every imported dataset.
    purpose_others_entity = ExpPurpose.objects.filter(name="others").first()
    if purpose_others_entity is None:
        purpose_others_entity = ExpPurpose.objects.create(
            name="others", creator=super_admin_entity, last_editor=super_admin_entity
        )

    # Preload the JSON lookup tables.
    print("加载查找表...")
    # Note id -> note text.
    note_json_path = "../dippr_tables/dippr_note.json"
    with open(note_json_path, "r", encoding="utf-8") as file:
        note_dict = json.load(file)

    # DIPPR reference id -> internal literature id.
    dippr_ref_2_ref_id_dict_path = "../dippr_tables/dippr_id_2_ref_id.json"
    with open(dippr_ref_2_ref_id_dict_path, "r", encoding="utf-8") as file:
        dippr_ref_2_ref_id_dict = json.load(file)

    # All literature rows keyed by liter_code.
    all_literatures = {
        lit.liter_code: lit
        for lit in Literature.objects.all().only("liter_code", "uuid")
    }

    # Applied-property abbreviation -> base-property abbreviation, plus all
    # Property rows keyed by abbreviation.
    prop_map = property_constantProp_mapping()
    all_properties = {
        ca.abbreviation: ca
        for ca in Property.objects.all().only("abbreviation", "uuid")
    }

    # All phases keyed by abbreviation.
    all_phases = {
        ca.abbreviation: ca
        for ca in Phase.objects.all().only("abbreviation", "uuid")
    }

    # All units keyed by sei_id.
    all_units = {
        ca.sei_id: ca
        for ca in Unit.objects.all().only("sei_id", "uuid")
    }

    # DIPPR id -> component entity.
    print("加载DIPPR组件映射...")
    component_mapping = {
        cav.attribute_value_char: cav.componentidentifier
        for cav in ComponentAttributeValue.objects.filter(
            attribute=dippr_attribute_entity
        ).select_related("componentidentifier")
    }

    # Load the Excel source data.
    print("加载Excel数据...")
    folder_path = "../dippr_tables/"
    file_paths = glob.glob(os.path.join(
        folder_path, "3.Const Properties Details*.xlsx"))
    sheet_name = "Sheet1"
    df = pd.read_excel(file_paths[0], sheet_name=sheet_name,
                       header=2, dtype=str)

    # Keep only the first 22 columns; blank cells become "".
    num_columns = 22
    selected_data = df.iloc[:, :num_columns].fillna("")

    # Batch accumulators; the "dataset_index" values recorded below refer to
    # positions within the current `datasets` list (reset after each flush).
    datasets = []
    dataset_phases = []
    component_maps = []
    property_maps = []
    datapoints = []
    datapoint_mappings = []  # NOTE(review): never populated or passed on — looks vestigial
    datapoint_property_maps = []
    # NOTE(review): this name differs from the "基础物性数据子库" bank created in
    # init_oil_product_component_group, so this may resolve to None — confirm.
    databank_entity = DataBank.objects.filter(name="基础物性数据集").first()
    print("开始处理数据...")
    for index, row in tqdm(selected_data.iterrows(), total=len(selected_data)):
        try:
            component_id = row["DIPPR ID"]
            if not component_id:
                continue

            property_abb = row["eAppPropAbbreviation"]
            if not property_abb:
                continue

            # Resolve the component via its DIPPR id.
            component_entity = component_mapping.get(component_id)
            if not component_entity:
                print(f"跳过: 未找到DIPPR ID {component_id} 对应的组分")
                continue

            # Resolve applied abbreviation -> base abbreviation -> Property.
            property_name = prop_map.get(property_abb)
            if not property_name:
                print(f"跳过: 未找到物性缩写 {property_abb} 的映射")
                continue

            property_entity = all_properties.get(property_name)
            if not property_entity:
                print(f"跳过: 未找到物性 {property_name}")
                continue

            # Phase is optional.
            phase_symbol = row["sPhaseSymbol"]
            phase_entity = all_phases.get(
                phase_symbol) if phase_symbol else None

            # Unit; fall back to the default unit when absent.
            unit_id = row["nUnitIDCombine"]
            unit_entity = all_units.get(unit_id) if unit_id else unit_1_entity

            # Raw cell values (all strings; dtype=str above).
            value = row["Value"]
            error = row["Error"]
            source = row["Source"]
            acceptance = row["Acceptance"]
            note_id = row["Note"]
            dippr_ref_id = row["Reference"]
            data_type = row["Data Type"]

            # Build the description and comment texts.
            description_parts = []
            comment_parts = []

            if note_id:
                note = note_dict.get(note_id, "")
                if note:
                    description_parts.append(note)
                    comment_parts.append(note)

            if source:
                comment_parts.append(f"source: {source}")
            if acceptance:
                comment_parts.append(f"Acceptance: {acceptance}")

            description = "\n".join(
                description_parts) if description_parts else ""
            comment = "\n".join(comment_parts) if comment_parts else ""

            # Resolve the data-presentation type, normalising DIPPR spellings
            # and creating missing rows on the fly.
            if not data_type:
                data_present_way_entity = data_present_unknow_entity
            else:
                if data_type == "Not Specified":
                    data_type = "Unspecified"
                if data_type == "Experiment":
                    data_type = "Experimental"

                data_present_way_entity = DataPresentWay.objects.filter(
                    name=data_type).first()
                if not data_present_way_entity:
                    data_present_way_entity = DataPresentWay.objects.create(
                        name=data_type,
                        creator=super_admin_entity,
                        last_editor=super_admin_entity,
                    )

            # Resolve the literature reference (optional).
            dippr_reference_entity = None
            if dippr_ref_id:
                ref_id = dippr_ref_2_ref_id_dict.get(dippr_ref_id, "")
                if ref_id:
                    ref_id = f"{int(ref_id):07d}"  # zero-pad to the 7-digit liter_code format
                    dippr_reference_entity = all_literatures.get(ref_id)

            # Dataset display name: "<compound>--<property>".
            dataset_name = f"{row['Compound']}--{row['Property']}"

            # Unsaved dataset; review_status=1 presumably means "reviewed/
            # approved" — TODO confirm against the ExpDataset model.
            dataset = ExpDataset(
                name=dataset_name,
                zh_name=dataset_name,
                description=description,
                comment=comment,
                databank = databank_entity,
                source_liter=dippr_reference_entity,
                purpose=purpose_others_entity,
                data_present_way=data_present_way_entity,
                review_status=1,
                creator=super_admin_entity,
                last_editor=super_admin_entity,
            )
            datasets.append(dataset)

            # Record the dataset/phase pair by batch-local index.
            if phase_entity:
                dataset_phases.append((len(datasets)-1, phase_entity))

            # Component link for this dataset.
            component_maps.append({
                "component": component_entity,
                "dataset_index": len(datasets)-1
            })

            # Property link for this dataset.
            # NOTE(review): measurement_error_percent is set from bool(error),
            # i.e. "an error value exists" — confirm it really means percent.
            property_maps.append({
                "property": property_entity,
                "unit": unit_entity if unit_entity else None,
                "measurement_error": error if error else None,
                "measurement_error_percent": bool(error),
                "dataset_index": len(datasets)-1
            })

            # One data point per row.
            datapoints.append({
                "dataset_index": len(datasets)-1
            })

            # Value/error payload linking the data point to its property map.
            datapoint_property_maps.append({
                "value": value,
                "measurement_error_value": error if error else None,
                "dataset_index": len(datasets)-1,
                "property_map_index": len(property_maps)-1
            })

            # Flush a full batch.
            if len(datasets) >= batch_size:
                _process_batch(
                    datasets, dataset_phases, component_maps,
                    property_maps, datapoints, datapoint_property_maps,
                    super_admin_entity
                )
                # Reset the batch accumulators.
                datasets.clear()
                dataset_phases.clear()
                component_maps.clear()
                property_maps.clear()
                datapoints.clear()
                datapoint_property_maps.clear()

        except Exception as e:
            print(f"处理行 {index} 时出错: {str(e)}")
            traceback.print_exc()
            continue

    # Flush the final partial batch.
    if datasets:
        _process_batch(
            datasets, dataset_phases, component_maps,
            property_maps, datapoints, datapoint_property_maps,
            super_admin_entity
        )

    print("数据处理完成")


def _process_batch(datasets, dataset_phases, component_maps,
                   property_maps, datapoints, datapoint_property_maps,
                   super_admin_entity):
    """Bulk-insert one batch of datasets plus all dependent rows.

    Args:
        datasets: unsaved ExpDataset instances.
        dataset_phases: (dataset_index, phase_entity) pairs for the
            ExpDataset.phases M2M table.
        component_maps: dicts with keys "component" and "dataset_index".
        property_maps: dicts with keys "property", "unit",
            "measurement_error", "measurement_error_percent",
            "dataset_index" (and optionally "component").
        datapoints: dicts with key "dataset_index"; datapoint i pairs with
            datapoint_property_maps[i] by position.
        datapoint_property_maps: dicts with keys "value",
            "measurement_error_value", "dataset_index", "property_map_index".
        super_admin_entity: unused here; kept for call-site compatibility.

    All writes happen in one transaction so any failure rolls back the batch.
    """
    with transaction.atomic():
        # 1. Bulk-create the datasets; positions in created_datasets line up
        # with the "dataset_index" values recorded by the caller.
        created_datasets = ExpDataset.objects.bulk_create(datasets)

        # Bug fix: bind these before the conditional sections below.
        # Previously created_prop_maps/dp_objs were only assigned inside
        # their "if" blocks, so an empty property_maps (or datapoints) list
        # made section 7 raise NameError / rely on a fragile locals() check.
        created_prop_maps = []
        dp_objs = []

        # 2. Dataset <-> phase many-to-many rows.
        if dataset_phases:
            phase_relations = [
                ExpDataset.phases.through(
                    expdataset_id=created_datasets[idx].uuid,
                    phase_id=phase_entity.uuid
                )
                for idx, phase_entity in dataset_phases
                if idx < len(created_datasets)
            ]
            if phase_relations:
                ExpDataset.phases.through.objects.bulk_create(phase_relations)

        # 3. Dataset <-> component rows.
        if component_maps:
            component_map_objs = [
                ExpDatasetComponentMap(
                    exp_dataset=created_datasets[item["dataset_index"]],
                    component=item["component"]
                )
                for item in component_maps
                if item["dataset_index"] < len(created_datasets)
            ]
            ExpDatasetComponentMap.objects.bulk_create(component_map_objs)

        # 4. Property maps, then their prop_component M2M rows.
        if property_maps:
            prop_map_objs = [
                ExpDatasetPropertyMap(
                    exp_dataset=created_datasets[item["dataset_index"]],
                    property=item["property"],
                    unit=item["unit"],
                    measurement_error=item["measurement_error"],
                    measurement_error_percent=item["measurement_error_percent"]
                )
                for item in property_maps
                if item["dataset_index"] < len(created_datasets)
            ]
            created_prop_maps = ExpDatasetPropertyMap.objects.bulk_create(
                prop_map_objs)

            # "component" is optional; current callers never set it, so this
            # normally creates nothing.
            prop_component_relations = []
            for i, item in enumerate(property_maps):
                if i >= len(created_prop_maps):
                    continue
                if item.get("component"):
                    prop_component_relations.append(
                        ExpDatasetPropertyMap.prop_component.through(
                            expdatasetpropertymap_id=created_prop_maps[i].uuid,
                            componentidentifier_id=item["component"].uuid
                        )
                    )
            if prop_component_relations:
                ExpDatasetPropertyMap.prop_component.through.objects.bulk_create(
                    prop_component_relations
                )

        # 5. Data points (empty rows; the values live in section 7's table).
        if datapoints:
            dp_objs = DataPoint.objects.bulk_create(
                [DataPoint() for _ in datapoints])

            # 6. Dataset <-> data-point many-to-many rows.
            dataset_dp_relations = [
                ExpDataset.data_points.through(
                    expdataset_id=created_datasets[item["dataset_index"]].uuid,
                    datapoint_id=dp_objs[i].uuid
                )
                for i, item in enumerate(datapoints)
                if i < len(dp_objs) and item["dataset_index"] < len(created_datasets)
            ]
            if dataset_dp_relations:
                ExpDataset.data_points.through.objects.bulk_create(
                    dataset_dp_relations)

        # 7. Data-point <-> property-map value rows.
        if datapoint_property_maps and created_prop_maps:
            dp_prop_map_objs = []
            for i, item in enumerate(datapoint_property_maps):
                if i >= len(dp_objs):
                    continue
                if item["property_map_index"] >= len(created_prop_maps):
                    continue
                dp_prop_map_objs.append(
                    DataPointExpDatasetPropertyMap(
                        data_point=dp_objs[i],
                        exp_dataset_property_map=created_prop_maps[item["property_map_index"]],
                        value=item["value"],
                        measurement_error_value=item["measurement_error_value"]
                    )
                )
            if dp_prop_map_objs:
                DataPointExpDatasetPropertyMap.objects.bulk_create(
                    dp_prop_map_objs)


def convert_note_to_json():
    """Extract note texts from the newest "A079. PCdata基表-7-标注表" workbook
    and dump them as {note_id: "<中文>(<English>)"} to dippr_note.json.

    Raises:
        FileNotFoundError: when no matching workbook exists (the previous
            version passed None to ``pd.read_excel`` and crashed cryptically).
    """
    matched_files = glob.glob(
        "../final_basic_tables/A079. PCdata基表-7-标注表*.xlsx")
    if not matched_files:
        raise FileNotFoundError(
            "no file matching 'A079. PCdata基表-7-标注表*.xlsx' found")
    # The trailing "-<suffix>" is treated as a version tag.
    latest_file = max(matched_files, key=lambda path: path.split("-")[-1])
    df = pd.read_excel(latest_file, sheet_name="Note（入库版）",
                       header=0, skiprows=[0, 2], dtype=str)
    # Keep the first 3 columns; blank cells become "".
    selected_data = df.iloc[:, :3].fillna("")
    # Chinese note first, English note in parentheses (as before).
    note_dict = {
        row["nNoteID"]: row["sNote"] + "(" + row["sNoteEN"] + ")"
        for _, row in selected_data.iterrows()
    }
    with open("../dippr_tables/dippr_note.json", "w", encoding="utf-8") as json_file:
        json.dump(note_dict, json_file, indent=4)


def convert_dippr_id_2_ref_id_json():
    """Build the DIPPR reference id -> internal citation id mapping from the
    newest "A075. PCdata基表-3-参考文献表" workbook and write it to
    dippr_id_2_ref_id.json.  Rows with an empty or "NA" DIPPR_ID are skipped.

    Raises:
        FileNotFoundError: when no matching workbook exists (the previous
            version passed None to ``pd.read_excel`` and crashed cryptically).
    """
    matched_files = glob.glob(
        "../final_basic_tables/A075. PCdata基表-3-参考文献表*.xlsx")
    if not matched_files:
        raise FileNotFoundError(
            "no file matching 'A075. PCdata基表-3-参考文献表*.xlsx' found")
    # The trailing "-<suffix>" is treated as a version tag.
    latest_file = max(matched_files, key=lambda path: path.split("-")[-1])
    df = pd.read_excel(latest_file, sheet_name="文献总表",
                       header=0, skiprows=[1], dtype=str)
    # Skip the first data row; keep the first 48 columns (as before).
    selected_data = df.iloc[1:, :48].fillna("")
    dippr_id_2_ref_id_json = {
        row["DIPPR_ID"]: row["nPCdataCitID"]
        for _, row in selected_data.iterrows()
        if row["DIPPR_ID"] not in ("", "NA")
    }
    with open(
        "../dippr_tables/dippr_id_2_ref_id.json", "w", encoding="utf-8"
    ) as json_file:
        json.dump(dippr_id_2_ref_id_json, json_file, indent=4)


def convert_dippr_const_property_data_to_json():
    """Dump the accepted ("Acceptance" == "A") DIPPR constant-property rows
    to const_property_data_list.json as a list of
    {com_id, prop_abb, value, unit_id} dicts.

    Raises:
        FileNotFoundError: when no matching workbook exists (the previous
            version raised IndexError on ``file_paths[0]``).
    """
    file_paths = glob.glob(os.path.join(
        "../dippr_tables/", "3.Const Properties Details*.xlsx"))
    if not file_paths:
        raise FileNotFoundError(
            "no file matching '3.Const Properties Details*.xlsx' found")
    df = pd.read_excel(file_paths[0], sheet_name="Sheet1",
                       header=2, dtype=str)
    # Keep the first 22 columns; blank cells become "".
    selected_data = df.iloc[:, :22].fillna("")
    const_property_data_list = [
        {
            "com_id": row["DIPPR ID"],
            "prop_abb": row["eAppPropAbbreviation"],
            "value": row["Value"],
            "unit_id": row["nUnitIDCombine"],
        }
        for _, row in selected_data.iterrows()
        if row["Acceptance"] == "A"
    ]
    with open(
        "../dippr_tables/const_property_data_list.json", "w"
    ) as json_file:
        json.dump(const_property_data_list, json_file, indent=4)


def init_dippr_const_property():
    """Create a ComponentConstProperty for every property abbreviation in
    const_property_data_list.json that does not exist yet, copying the
    metadata from the matching Property row.

    Fixes two defects in the previous version:
    * ``ComponentConstProperty.objects.filter(...)`` returns a QuerySet,
      which never compares equal to None, so the creation branch was
      unreachable and nothing was ever created; ``.first()`` is required.
    * a missing source Property was reported but still dereferenced,
      raising AttributeError; such abbreviations are now skipped.
    """
    super_admin_entity = User.objects.filter(user_account="superadmin").first()

    note_json_path = "../dippr_tables/const_property_data_list.json"
    with open(note_json_path, "r", encoding="utf-8") as file:
        const_property_data_list = json.load(file)

    unit_1_entity = Unit.objects.filter(sei_id="U-0-1").first()
    if unit_1_entity is None:
        print("unit symbol 1 not exist")
        return

    # Distinct property abbreviations referenced by the data.
    prop_abb_set = {item["prop_abb"] for item in const_property_data_list}

    const_property_entity_list = []
    for prop_abb in prop_abb_set:
        # Bug fix: .first() yields an entity or None (a bare QuerySet is
        # never == None, which made this check always fail before).
        if ComponentConstProperty.objects.filter(
                abbreviation=prop_abb).first() is not None:
            continue
        property_entity = Property.objects.filter(
            abbreviation=prop_abb).first()
        if property_entity is None:
            print("error property abb:", prop_abb)
            continue  # bug fix: previously fell through and dereferenced None
        const_property_entity_list.append(ComponentConstProperty(
            name=property_entity.name,
            name_en=property_entity.name_en,
            abbreviation=property_entity.abbreviation,
            quantity=property_entity.quantity,
            data_type="float",
            common_used=True,
            is_show_in_detail=False,
            description=property_entity.description,
            description_en=property_entity.description_en,
            comment=property_entity.comment,
            create_person=super_admin_entity,
            modify_person=super_admin_entity,
        ))
    try:
        with transaction.atomic():
            ComponentConstProperty.objects.bulk_create(
                const_property_entity_list)
    except Exception:
        traceback.print_exc()


def init_dippr_const_property_data():
    """Bulk-insert a ComponentConstPropertyValue for every row of
    const_property_data_list.json whose (component, property) pair does not
    already have a value.  Rows are committed in blocks of 1000, each block
    in its own transaction; the first failing block aborts the run.

    Changes from the previous version: dead ``if index < -1`` filter
    removed; ``is None`` / ``.exists()`` instead of ``== None`` / ``!= None``
    comparisons; lookup maps built with comprehensions.
    """
    super_admin_entity = User.objects.filter(user_account="superadmin").first()

    note_json_path = "../dippr_tables/const_property_data_list.json"
    with open(note_json_path, "r", encoding="utf-8") as file:
        const_property_data_list = json.load(file)

    unit_1_entity = Unit.objects.filter(sei_id="U-0-1").first()
    if unit_1_entity is None:
        print("unit symbol 1 not exist")
        return
    dippr_attribute_entity = Attribute.objects.filter(
        name_en="nDipprID").first()
    if dippr_attribute_entity is None:
        print("dippr attribute not exist")
        return

    # Constant-property abbreviation -> entity.
    const_property_abb_entity_map = {
        prop.abbreviation: prop
        for prop in ComponentConstProperty.objects.filter(valid_flag=True)
    }

    # Unit sei_id -> entity (units without a sei_id cannot be addressed).
    unit_sei_id_entity_map = {
        unit.sei_id: unit
        for unit in Unit.objects.filter(valid_flag=True)
        if unit.sei_id != ""
    }

    blocks = []
    current_block = []
    block_size = 1000
    for item in tqdm(const_property_data_list, desc="Processing const property data"):
        # NOTE: one query per row; acceptable for a one-off import script.
        attr_value = ComponentAttributeValue.objects.filter(
            attribute=dippr_attribute_entity, attribute_value_char=item["com_id"]
        ).first()
        if attr_value is None:
            print("error at component dippr id :", item["com_id"])
            continue
        component_entity = attr_value.componentidentifier
        const_property_entity = const_property_abb_entity_map[item["prop_abb"]]
        if item["unit_id"] == "":
            unit_entity = unit_1_entity
        else:
            unit_entity = unit_sei_id_entity_map[item["unit_id"]]
        # Skip pairs that already have a value (makes re-runs idempotent).
        if ComponentConstPropertyValue.objects.filter(
            component=component_entity,
            property=const_property_entity,
        ).exists():
            continue
        if item["value"] == "":
            continue

        current_block.append(ComponentConstPropertyValue(
            component=component_entity,
            property=const_property_entity,
            property_value_float=float(item["value"]),
            property_unit=unit_entity,
        ))
        if len(current_block) >= block_size:
            blocks.append(current_block)
            current_block = []
    if current_block:
        blocks.append(current_block)

    for index, block in enumerate(blocks):
        try:
            with transaction.atomic():
                ComponentConstPropertyValue.objects.bulk_create(block)
        except Exception:
            traceback.print_exc()
            print("error in block:", index)
            return


def convert_dippt_T_data_to_json():
    """Flatten the temperature-dependent DIPPR data sheet into a list of
    dicts and dump it to property_T_data.json.

    Changes from the previous version: unused ``start_row``/``end_row``
    locals and commented-out code removed; column mapping expressed as a
    spec so output key order stays explicit.

    Raises:
        FileNotFoundError: when no matching workbook exists (the previous
            version raised IndexError on ``file_paths[0]``).
    """
    file_paths = glob.glob(os.path.join(
        "../dippr_tables/", "5.Tdep Data Sets*.xlsx"))
    if not file_paths:
        raise FileNotFoundError(
            "no file matching '5.Tdep Data Sets*.xlsx' found")
    df = pd.read_excel(file_paths[0], sheet_name="入库结构化表",
                       header=2, dtype=str)
    # Keep the first 26 columns; blank cells become "".
    selected_data = df.iloc[:, :26].fillna("")

    # Output key -> source column name (order defines JSON key order).
    column_spec = [
        ("DIPPR_ID", "DIPPR ID"),
        ("Compound", "Compound"),
        ("Property", "Property"),
        ("ePropAbbreviation", "ePropAbbreviation"),
        ("sPhaseSymbol", "sPhaseSymbol"),
        ("sPhaseSymbol2", "sPhaseSymbol2"),
        ("T_value", "T"),
        ("property_value", "Value"),
        ("unit_id", "nPCdataUnitID"),
        ("Error", "Error"),
        ("Note", "Note"),
        ("dippr_ref_id", "Ref"),
        ("Acceptance", "Acceptance"),
        ("Data_Type", "Data Type"),
    ]
    const_property_data_list = [
        {key: row[column] for key, column in column_spec}
        for _, row in selected_data.iterrows()
    ]

    with open("../dippr_tables/property_T_data.json", "w") as json_file:
        json.dump(const_property_data_list, json_file, indent=4)


def convert_dippt_t_data_to_group_json():
    """Group the flat T-dependent property records by (DIPPR id, property
    abbreviation, both phase symbols, reference, acceptance) and write the
    groups — together with the distinct unit ids, property abbreviations,
    phase symbols and data types — to grouped_property_T_data.json.
    """
    with open("../dippr_tables/property_T_data.json", "r", encoding="utf-8") as file:
        records = json.load(file)

    # Distinct-value buckets collected while grouping.
    unit_ids = set()
    prop_abbs = set()
    phase_symbols = set()
    data_types = set()
    grouped = defaultdict(list)

    for record in tqdm(records, desc="Processing t property data"):
        group_key = (
            record["DIPPR_ID"],
            record["ePropAbbreviation"],
            record["sPhaseSymbol"],
            record["sPhaseSymbol2"],
            record["dippr_ref_id"],
            record["Acceptance"],
        )
        grouped[group_key].append(record)
        # Collect non-empty field values into their buckets.
        for value, bucket in (
            (record["unit_id"], unit_ids),
            (record["ePropAbbreviation"], prop_abbs),
            (record["sPhaseSymbol"], phase_symbols),
            (record["sPhaseSymbol2"], phase_symbols),
            (record["Data_Type"], data_types),
        ):
            if value != "":
                bucket.add(value)

    sum_dict = {
        "unit_id_list": list(unit_ids),
        "ePropAbbreviation_list": list(prop_abbs),
        "sPhaseSymbol_list": list(phase_symbols),
        "Data_Type_list": list(data_types),
        "grouped_data": {
            "Group_" + "_".join(key): members
            for key, members in grouped.items()
        },
    }

    output_file = "../dippr_tables/grouped_property_T_data.json"
    with open(output_file, "w", encoding="utf-8") as f:
        json.dump(sum_dict, f, indent=4, ensure_ascii=False)

    print(f"分组结果已存储到 {output_file}")


def init_dippr_t_data():
    """Bulk-import grouped DIPPR temperature-dependent (T) property data.

    Loads ``grouped_property_T_data.json`` (output of
    ``convert_dippt_t_data_to_group_json``) plus the DIPPR note/reference
    alignment files, resolves every unit, property, phase, data-present-way,
    component and literature entity up front, then creates one ``ExpDataset``
    per group together with its phase/component/property/variable relations
    and all ``DataPoint`` rows via ``bulk_create`` inside a single
    transaction.  Prints a message and returns early if any prerequisite
    entity is missing or unmatched.
    """
    super_admin_entity = User.objects.filter(user_account="superadmin").first()

    # ---- load grouped data and DIPPR alignment helpers -------------------
    note_json_path = "../dippr_tables/grouped_property_T_data.json"
    with open(note_json_path, "r", encoding="utf-8") as file:
        grouped_property_T_data = json.load(file)

    unit_id_list = grouped_property_T_data["unit_id_list"]
    ePropAbbreviation_list = grouped_property_T_data["ePropAbbreviation_list"]
    sPhaseSymbol_list = grouped_property_T_data["sPhaseSymbol_list"]
    Data_Type_list = grouped_property_T_data["Data_Type_list"]
    grouped_data = grouped_property_T_data["grouped_data"]

    with open("../dippr_tables/dippr_note.json", "r", encoding="utf-8") as file:
        note_dict = json.load(file)
    with open("../dippr_tables/dippr_id_2_ref_id.json", "r", encoding="utf-8") as file:
        dippr_ref_2_ref_id_dict = json.load(file)

    # ---- fixed prerequisite entities (abort early if missing) ------------
    dippr_attribute_entity = Attribute.objects.filter(
        name_en="nDipprID").first()
    if dippr_attribute_entity is None:
        print("dippr attribute not exist")
        return

    variable_temperature_entity = PropertyVariable.objects.filter(
        symbol="T").first()
    if variable_temperature_entity is None:
        print("variable temperature not exist")
        return

    unit_K_entity = Unit.objects.filter(sei_id="U-9-1").first()
    if unit_K_entity is None:
        print("unit symbol K not exist")
        return

    # ---- lookup maps; fail fast when any referenced value is unmatched ---
    unit_entity_list = Unit.objects.filter(
        valid_flag=True, sei_id__in=unit_id_list)
    unit_sei_id_entity_map = {unit.sei_id: unit for unit in unit_entity_list}
    if len(unit_entity_list) != len(unit_id_list):
        print("unit match error!")
        return

    property_entity_list = Property.objects.filter(
        valid_flag=True, abbreviation__in=ePropAbbreviation_list
    )
    property_abb_entity_map = {
        prop.abbreviation: prop for prop in property_entity_list}
    if len(property_entity_list) != len(ePropAbbreviation_list):
        print("property match error!")
        return

    # Patch: some records reference phase "V1" which may not exist yet.
    if Phase.objects.filter(abbreviation="V1").first() is None:
        Phase.objects.create(
            name="V1(未命名)",
            name_en="V1()",
            abbreviation="V1",
            is_pure_used=True,
            is_binary_used=True,
            is_ternary_used=True,
            is_multi_used=True,
            is_reaction_used=True,
            is_complex_used=True,
            create_person=super_admin_entity,
            modify_person=super_admin_entity,
        )

    phase_entity_list = Phase.objects.filter(
        abbreviation__in=sPhaseSymbol_list)
    phase_symbol_entity_map = {
        phase.abbreviation: phase for phase in phase_entity_list}
    if len(phase_entity_list) != len(sPhaseSymbol_list):
        print("phase match error!")
        return

    # ---- data-present-way entities ---------------------------------------
    # Spellings are normalized before lookup/creation, but the map is keyed
    # by the RAW spelling so the import loop can index it directly.
    data_type_entity_map = {}
    for data_type in Data_Type_list:
        normalized_type = {
            "Not Specified": "Unspecified",
            "Experiment": "Experimental"
        }.get(data_type, data_type)

        if not normalized_type:
            continue

        data_present_way_entity = DataPresentWay.objects.filter(
            name=normalized_type).first()
        if data_present_way_entity is None:
            data_present_way_entity = DataPresentWay.objects.create(
                name=normalized_type,
                creator=super_admin_entity,
                last_editor=super_admin_entity,
            )
        data_type_entity_map[data_type] = data_present_way_entity

    # Fallback entity for records with an empty/unknown Data_Type.
    data_present_unknow_entity = DataPresentWay.objects.filter(
        name="Unknown").first()
    if data_present_unknow_entity is None:
        data_present_unknow_entity = DataPresentWay.objects.create(
            name="Unknown", creator=super_admin_entity, last_editor=super_admin_entity
        )

    purpose_others_entity = ExpPurpose.objects.filter(name="others").first()
    if purpose_others_entity is None:
        purpose_others_entity = ExpPurpose.objects.create(
            name="others",
            creator=super_admin_entity,
            last_editor=super_admin_entity,
        )

    print("extra data finish")

    # ---- bulk-resolve components keyed by their DIPPR id attribute -------
    component_map = {
        attr.attribute_value_char: attr.componentidentifier
        for attr in ComponentAttributeValue.objects.filter(
            attribute=dippr_attribute_entity,
            attribute_value_char__in=[item[0]["DIPPR_ID"]
                                      for item in grouped_data.values()]
        ).select_related('componentidentifier')
    }

    # ---- bulk-resolve literature references ------------------------------
    mapped_ref_ids = [
        str(dippr_ref_2_ref_id_dict[ref_id])
        for ref_id in {item[0]["dippr_ref_id"] for item in grouped_data.values() if item[0]["dippr_ref_id"]}
        if ref_id in dippr_ref_2_ref_id_dict
    ]

    # Invert the dippr_ref_id -> liter_code mapping onto Literature rows.
    ref_map = {
        dippr_ref_id: lit
        for lit in Literature.objects.filter(liter_code__in=mapped_ref_ids)
        for dippr_ref_id, mapped_id in dippr_ref_2_ref_id_dict.items()
        if str(mapped_id) == lit.liter_code
    }

    # ---- accumulate everything, persist once inside one transaction ------
    datasets_to_create = []
    dataset_point_counts = []       # datapoints per *created* dataset only
    dataset_phase_relations = []
    dataset_component_relations = []
    dataset_property_relations = []
    dataset_variable_relations = []

    datapoints_to_create = []
    datapoint_property_relations = []
    datapoint_variable_relations = []

    databank_entity = DataBank.objects.filter(name="基础物性数据集").first()
    # NOTE(review): init_oil_product_component_group() creates
    # "基础物性数据子库", not "基础物性数据集" — confirm which name is
    # intended.  Warn loudly instead of silently leaving databank=None.
    if databank_entity is None:
        print("warning: databank '基础物性数据集' not found; datasets will be created without a databank")

    for index, (key, item) in enumerate(tqdm(grouped_data.items(), desc="Processing const property data")):
        try:
            item_first = item[0]

            component_entity = component_map.get(item_first["DIPPR_ID"])
            if component_entity is None:
                # Skipped groups must NOT contribute a point count, otherwise
                # the later datapoint->dataset linking drifts out of sync.
                print("error at component dippr id:", item_first["DIPPR_ID"])
                continue

            phases = []
            if item_first["sPhaseSymbol"]:
                phases.append(
                    phase_symbol_entity_map[item_first["sPhaseSymbol"]])
            if item_first["sPhaseSymbol2"]:
                phases.append(
                    phase_symbol_entity_map[item_first["sPhaseSymbol2"]])

            property_entity = property_abb_entity_map[item_first["ePropAbbreviation"]]
            property_unit_entity = unit_sei_id_entity_map[item_first["unit_id"]]

            # Description/comment come from the aligned note text (if any),
            # plus the acceptance flag.
            description = ""
            comment = ""
            if item_first["Note"]:
                note = note_dict.get(item_first["Note"], "")
                if note:
                    description = note + "\n"
                    comment = note + "\n"
            if item_first["Acceptance"]:
                # Fixed missing separator (was "Acceptance" + value, which
                # produced e.g. "AcceptanceHigh").
                comment += "Acceptance: " + item_first["Acceptance"]

            # Raw Data_Type keys the pre-built map; unknown/empty types fall
            # back to the "Unknown" entity.
            data_present_way_entity = data_type_entity_map.get(
                item_first["Data_Type"], data_present_unknow_entity)

            dippr_reference_entity = ref_map.get(item_first["dippr_ref_id"])

            dataset_name = f"{item_first['Compound']}--{item_first['Property']}--Temperature"

            dataset = ExpDataset(
                name=dataset_name,
                zh_name=dataset_name,
                description=description,
                comment=comment,
                databank=databank_entity,
                source_liter=dippr_reference_entity,
                purpose=purpose_others_entity,
                data_present_way=data_present_way_entity,
                review_status=1,
                creator=super_admin_entity,
                last_editor=super_admin_entity,
            )
            datasets_to_create.append(dataset)
            dataset_point_counts.append(len(item))
            ds_index = len(datasets_to_create) - 1

            for phase in phases:
                dataset_phase_relations.append((ds_index, phase))

            dataset_component_relations.append((ds_index, component_entity))

            dataset_property_relations.append((
                ds_index,
                property_entity,
                property_unit_entity
            ))

            dataset_variable_relations.append((
                ds_index,
                variable_temperature_entity,
                unit_K_entity
            ))

            # One DataPoint per record in the group; relations carry indices
            # into the to-be-created lists so they can be resolved after the
            # bulk_create calls assign primary keys.
            for point_item in item:
                datapoints_to_create.append(DataPoint())
                dp_index = len(datapoints_to_create) - 1

                if point_item["T_value"]:
                    datapoint_variable_relations.append((
                        dp_index,
                        ds_index,
                        variable_temperature_entity,
                        unit_K_entity,
                        float(point_item["T_value"])
                    ))

                if point_item["property_value"]:
                    error = point_item["Error"] if point_item["Error"] else None
                    datapoint_property_relations.append((
                        dp_index,
                        ds_index,
                        property_entity,
                        property_unit_entity,
                        point_item["property_value"],
                        error
                    ))

        except Exception:
            print("error at index:", index)
            print("error at group:", key)
            traceback.print_exc()
            return

    with transaction.atomic():
        print("Creating datasets in bulk...")
        created_datasets = ExpDataset.objects.bulk_create(datasets_to_create)

        print("Creating dataset-phase relations...")
        dataset_phase_m2m = [
            ExpDataset.phases.through(
                expdataset_id=created_datasets[dataset_idx].uuid,
                phase_id=phase.uuid
            )
            for dataset_idx, phase in dataset_phase_relations
        ]
        if dataset_phase_m2m:
            ExpDataset.phases.through.objects.bulk_create(dataset_phase_m2m)

        print("Creating dataset-component relations...")
        dataset_component_maps = [
            ExpDatasetComponentMap(
                exp_dataset=created_datasets[dataset_idx],
                component=component
            )
            for dataset_idx, component in dataset_component_relations
        ]
        if dataset_component_maps:
            ExpDatasetComponentMap.objects.bulk_create(dataset_component_maps)

        print("Creating dataset-property relations...")
        dataset_property_maps = [
            ExpDatasetPropertyMap(
                exp_dataset=created_datasets[dataset_idx],
                property=prop,
                unit=unit,
                measurement_error_percent=True
            )
            for dataset_idx, prop, unit in dataset_property_relations
        ]
        # Assign unconditionally: bulk_create([]) is a no-op returning [],
        # so created_prop_maps/created_var_maps are always defined below
        # (the old conditional assignment could leave them unbound).
        created_prop_maps = ExpDatasetPropertyMap.objects.bulk_create(
            dataset_property_maps)

        print("Creating dataset-variable relations...")
        dataset_variable_maps = [
            ExpDatasetVariableMap(
                exp_dataset=created_datasets[dataset_idx],
                variable=var,
                unit=unit
            )
            for dataset_idx, var, unit in dataset_variable_relations
        ]
        created_var_maps = ExpDatasetVariableMap.objects.bulk_create(
            dataset_variable_maps)

        print("Creating datapoints in bulk...")
        created_datapoints = DataPoint.objects.bulk_create(
            datapoints_to_create)

        print("Linking datapoints to datasets...")
        # Fixed: counts come from dataset_point_counts (created datasets
        # only).  The previous per-group count list included groups skipped
        # for a missing component, so the slices drifted out of alignment.
        dataset_datapoint_m2m = []
        offset = 0
        for dataset, count in zip(created_datasets, dataset_point_counts):
            for dp in created_datapoints[offset:offset + count]:
                dataset_datapoint_m2m.append(dataset.data_points.through(
                    expdataset_id=dataset.uuid,
                    datapoint_id=dp.uuid
                ))
            offset += count
        if dataset_datapoint_m2m:
            ExpDataset.data_points.through.objects.bulk_create(
                dataset_datapoint_m2m)

        print("Creating datapoint-variable relations...")
        # One ExpDatasetVariableMap was appended per created dataset, so
        # created_var_maps[ds_idx] is the matching map.
        dp_var_maps = [
            DataPointExpDatasetVariableMap(
                data_point=created_datapoints[dp_idx],
                exp_dataset_variable_map=created_var_maps[ds_idx],
                value=value
            )
            for dp_idx, ds_idx, _var, _unit, value in datapoint_variable_relations
        ]
        if dp_var_maps:
            DataPointExpDatasetVariableMap.objects.bulk_create(dp_var_maps)

        print("Creating datapoint-property relations...")
        dp_prop_maps = []
        for dp_idx, ds_idx, _prop, _unit, value, error in datapoint_property_relations:
            fields = {
                "data_point": created_datapoints[dp_idx],
                "exp_dataset_property_map": created_prop_maps[ds_idx],
                "value": value,
            }
            if error:
                fields["measurement_error_value"] = error
            dp_prop_maps.append(DataPointExpDatasetPropertyMap(**fields))
        if dp_prop_maps:
            DataPointExpDatasetPropertyMap.objects.bulk_create(dp_prop_maps)

    print("All data imported successfully!")


def init_dippr_const_content():
    """Run the full DIPPR import pipeline in dependency order.

    Stages: databank setup -> alignment JSON conversion -> constant data ->
    constant properties -> temperature-dependent (T) data.  Each stage is
    bracketed by progress messages.
    """
    print("初始化所属专题数据库")
    init_oil_product_component_group()
    print("初始化dippr对齐数据转化json格式")
    convert_note_to_json()
    convert_dippr_id_2_ref_id_json()
    print("dippr对齐数据处理完毕")
    print("初始化dippr const data数据")
    init_dippr_const_data()
    print("dippr const data数据处理完毕")
    print("初始化dippr const property数据")
    convert_dippr_const_property_data_to_json()
    init_dippr_const_property()
    init_dippr_const_property_data()
    # Fixed copy-paste: this stage finishes "const property", not
    # "const data" (the previous message duplicated the line above).
    print("dippr const property数据处理完毕")
    print("初始化dippr T data数据")
    convert_dippt_T_data_to_json()
    convert_dippt_t_data_to_group_json()
    init_dippr_t_data()
    print("dippr T data数据处理完毕")
