package io.a.sql.smartsense;

import com.google.common.collect.TreeTraverser;
import io.a.entity.DeviceServiceOrg;
import io.a.entity.InspectDay;
import io.a.utils.H;
import lombok.extern.slf4j.Slf4j;
import org.apache.spark.sql.*;

import java.time.LocalDate;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

import static io.a.utils.Const.*;

/**
 * Builds a per-tenant organization tree from the device/org source tables and
 * appends one daily inspection summary row per organization (plus a
 * tenant-wide row with a null org_id) into the r_inspect_day table via Spark JDBC.
 */
@Slf4j
@Slf4j
public class SmartSense {

    /** Root of the organization tree for each tenant; key is the tenant id. */
    private static final ConcurrentHashMap<Long, Node> TENANT_NODE_MAP = new ConcurrentHashMap<>();

    private static SparkSession spark;

    /** Shared reader pre-configured with the JDBC connection options from {@code H.DB}. */
    private static final DataFrameReader dataFrameReader = init();

    private static final String FORMAT_JDBC = "jdbc";

    /** Table the daily inspection rows are appended to. */
    private static final String TARGET_TABLE = "r_inspect_day";

    /**
     * Creates the local SparkSession and a JDBC reader carrying the database
     * connection options.
     *
     * @return a reader ready to accept a {@code "query"} option and {@code load()}
     */
    private static DataFrameReader init() {
        spark = SparkSession
                .builder()
                .master("local[*]")
                .appName(SmartSense.class.getName())
                .getOrCreate();

        return spark.read().format(FORMAT_JDBC)
                .option(DRIVER, H.DB.getDriver()).option(URL, H.DB.getUrl())
                .option(USER, H.DB.getUser()).option(PASSWORD, H.DB.getPassword());
    }

    /** Stops the SparkSession if it was started. */
    private static void finish() {
        if (spark != null) {
            spark.stop();
        }
    }

    public static void main(String[] args) throws AnalysisException {
        // Guarantee the SparkSession is stopped even when a stage fails;
        // the original only called finish() on the success path.
        try {
            run();
        } finally {
            finish();
        }
    }

    /**
     * Loads the source views, builds one org tree per tenant and writes the
     * daily inspection rows for every organization of every tenant.
     */
    private static void run() throws AnalysisException {
        final Dataset<Row> deviceWithOrg = dataFrameReader.option("query", "SELECT * FROM v_device_with_org").load();
        Dataset<Row> org = dataFrameReader.option("query", "SELECT * FROM device_service_org").load();

        deviceWithOrg.createOrReplaceTempView("d");
        org.createOrReplaceTempView("o");

        // Collect all distinct tenant ids.
        // NOTE(review): assumes tenant_id is a bigint column, otherwise
        // Encoders.LONG() will fail at runtime — confirm the view schema.
        Dataset<Row> tenantDS = spark.sql("SELECT DISTINCT tenant_id FROM d");
        List<Long> tenantIds = tenantDS.as(Encoders.LONG()).collectAsList();

        log.info("tenant id list : {}", tenantIds);

        // Build the organization tree of each tenant, rooted at a synthetic
        // node whose value is the tenant id itself.
        for (Long tenantId : tenantIds) {
            Node node = new Node(tenantId + "", new ArrayList<>());

            // Column-expression filter instead of string-concatenated SQL.
            Dataset<DeviceServiceOrg> rootOrgDS = spark.table("o")
                    .filter(functions.col("tenant_id").equalTo(tenantId)
                            .and(functions.col("pid").isNull()))
                    .as(Encoders.bean(DeviceServiceOrg.class));

            rr(rootOrgDS, node, tenantId);

            TENANT_NODE_MAP.put(tenantId, node);
        }

        // Generate and persist yesterday's row for every org of every tenant.
        for (Long tenantId : tenantIds) {
            List<String> orgIds = getOrgIds(tenantId, null);

            List<InspectDay> inspectDays = new ArrayList<>(orgIds.size());
            for (String orgId : orgIds) {
                inspectDays.add(generateInspectDay(LocalDate.now().minusDays(1), orgId, tenantId));
            }

            Dataset<InspectDay> dataset = spark.createDataset(inspectDays, Encoders.bean(InspectDay.class));

            // Drop the id column so the database assigns it on insert —
            // presumably an auto-generated key; TODO confirm against the DDL.
            dataset.drop("id").write()
                    .mode(SaveMode.Append)
                    .format(FORMAT_JDBC)
                    .option(DRIVER, H.DB.getDriver()).option(URL, H.DB.getUrl())
                    .option(USER, H.DB.getUser()).option(PASSWORD, H.DB.getPassword())
                    .option("dbtable", TARGET_TABLE)
                    .save();
        }
    }

    /**
     * Assembles the daily inspection metrics for one organization.
     *
     * <p>TODO the counters below are hard-coded placeholders; replace them with
     * the real aggregations once the metric queries are defined.
     *
     * @param localDate the day the metrics belong to
     * @param orgId     organization id, {@code null} for the tenant-wide row
     * @param tenantId  tenant id
     * @return the populated row to append to {@code r_inspect_day}
     */
    private static InspectDay generateInspectDay(LocalDate localDate, String orgId, Long tenantId) {
        InspectDay inspectDay = new InspectDay();
        inspectDay.setTenant_id(tenantId);
        inspectDay.setOrg_id(orgId);
        inspectDay.setDay(localDate);
        inspectDay.setInspect_num(1L);
        inspectDay.setNotInspect_num(1L);
        inspectDay.setException_num(1L);
        inspectDay.setNew_inspect_num(2L);
        inspectDay.setNew_notInspect_num(3L);
        inspectDay.setNew_exception_num(4L);
        return inspectDay;
    }

    /**
     * Returns the organization id list for a tenant. When {@code orgId} is
     * given, returns the values of the matching subtree node; when it is
     * {@code null}, returns all ids of the tenant with the first entry replaced
     * by {@code null} (the tenant-wide scope).
     * TODO add caching
     *
     * @param tenantId tenant id
     * @param orgId    organization id, may be null
     * @return organization id list, empty when nothing matches
     */
    private static List<String> getOrgIds(Long tenantId, String orgId) {
        Node root = TENANT_NODE_MAP.get(tenantId);
        if (root == null) {
            // The tree was never built for this tenant; the original code
            // would have thrown an NPE in the traversal here.
            return new ArrayList<>();
        }

        // Plain breadth-first walk; replaces the deprecated Guava TreeTraverser
        // with the same visit order (node first, then its children level by level).
        Deque<Node> queue = new ArrayDeque<>();
        queue.add(root);
        while (!queue.isEmpty()) {
            Node current = queue.poll();
            if (orgId != null) {
                if (orgId.equals(current.getValue())) {
                    return current.values();
                }
                queue.addAll(current.getChildren());
            } else {
                // Defensive copy: the original mutated the list returned by
                // Node.values() in place, which may be the tree's own state.
                List<String> result = new ArrayList<>(current.values());
                if (!result.isEmpty()) {
                    result.set(0, null); // null org_id marks the tenant-wide aggregate
                }
                return result;
            }
        }

        return new ArrayList<>();
    }

    /**
     * Recursively attaches the child organizations of every org in
     * {@code deviceServiceOrgDataset} under {@code parentNode}.
     *
     * <p>NOTE(review): this issues one Spark query per organization (N+1);
     * consider collecting the whole org table once and grouping by pid.
     *
     * @param deviceServiceOrgDataset orgs to attach at the current level
     * @param parentNode              parent tree node
     * @param tenantId                tenant id
     */
    private static void rr(Dataset<DeviceServiceOrg> deviceServiceOrgDataset, Node parentNode, long tenantId) {
        for (DeviceServiceOrg deviceServiceOrg : deviceServiceOrgDataset.collectAsList()) {
            String orgId = deviceServiceOrg.getId();
            Node node = new Node(orgId, new ArrayList<>());
            parentNode.getChildren().add(node);

            // Column-expression filter instead of string-concatenated SQL:
            // immune to quoting/injection problems in orgId.
            Dataset<DeviceServiceOrg> children = spark.table("o")
                    .filter(functions.col("tenant_id").equalTo(tenantId)
                            .and(functions.col("pid").equalTo(orgId)))
                    .as(Encoders.bean(DeviceServiceOrg.class));

            rr(children, node, tenantId);
        }
    }
}
