package com.cl.ks.flow.handler;

import com.alibaba.fastjson.JSONObject;
import com.cl.ks.entity.SparkNode;
import com.cl.ks.entity.SparkRDDInfo;
import com.cl.ks.flow.base.BaseFlowNodeHandler;
import com.cl.ks.flow.enums.NodeHandlerEnum;
import com.cl.ks.service.SparkRDDInfoService;
import com.cl.ks.service.SysSourceConfigService;
import com.cl.ks.service.SysSourceMappingTableFieldService;
import com.cl.ks.service.SysSourceMappingTableService;
import com.cl.ks.utils.DataSourceMap;
import com.cl.ks.utils.DbUtil;
import com.cl.spark.dto.SparkResult;
import com.querydsl.jpa.impl.JPAQueryFactory;

import java.util.List;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Flow-node handler that reloads a previously persisted dataset from disk.
 *
 * <p>Looks up the {@link SparkRDDInfo} record whose {@code code} matches the node
 * expression's {@code "code"} attribute, then reads the Parquet file at that
 * record's file path back into a {@link Dataset} and wraps it in a
 * {@link SparkResult} (dataset + row count).
 */
@Component
public class ReadPersistDataHandler extends BaseFlowNodeHandler {
    private final DbUtil dbUtil;
    private final JPAQueryFactory queryFactory;
    private final CreateTableHandler createTableHandler;
    private final PushClueHandler pushClueHandler;
    private final SparkSession sparkSession;
    private final SparkRDDInfoService sparkRDDInfoService;

    // NOTE(review): this class mixes constructor injection with field injection.
    // Prefer moving these into the constructor; left unchanged here to avoid
    // altering the Spring wiring.
    @Autowired
    DataSourceMap dataSourceMap;

    @Autowired
    private SysSourceConfigService sysSourceConfigService;
    @Autowired
    private SysSourceMappingTableService sysSourceMappingTableService;
    @Autowired
    private SysSourceMappingTableFieldService sysSourceMappingTableFieldService;

    public ReadPersistDataHandler(DbUtil dbUtil, JPAQueryFactory queryFactory, CreateTableHandler createTableHandler, PushClueHandler pushClueHandler, SparkSession sparkSession, SparkRDDInfoService sparkRDDInfoService) {
        this.dbUtil = dbUtil;
        this.queryFactory = queryFactory;
        this.createTableHandler = createTableHandler;
        this.pushClueHandler = pushClueHandler;
        this.sparkSession = sparkSession;
        this.sparkRDDInfoService = sparkRDDInfoService;
    }


    /**
     * Reads the persisted Parquet file referenced by the node's {@code code}.
     *
     * @param processParam the flow-execution parameter carrying the current {@link SparkNode}
     * @return a {@link SparkResult} holding the loaded dataset and its row count
     * @throws IllegalStateException if no {@link SparkRDDInfo} record exists for the code
     * @throws RuntimeException      if the Parquet file cannot be read (cause attached)
     */
    @Override
    public SparkResult process(ProcessParam processParam) {
        SparkNode sparkNode = processParam.getSparkNode();
        JSONObject nodeExpression = sparkNode.getNodeExpression();
        String code = nodeExpression.getString("code");

        // Guard against a missing persistence record: the original code called
        // get(0) directly, which would throw an unhelpful IndexOutOfBoundsException
        // when findByField returns an empty list.
        List<SparkRDDInfo> rddInfos = sparkRDDInfoService.findByField("code", code);
        if (rddInfos == null || rddInfos.isEmpty()) {
            throw new IllegalStateException("No SparkRDDInfo record found for code: " + code);
        }
        SparkRDDInfo sparkRddInfo = rddInfos.get(0);
        String filePath = sparkRddInfo.getFilePath();

        SparkResult sparkResult = new SparkResult();
        try {
            Dataset<Row> persistDataset = sparkSession.read().parquet(filePath);
            sparkResult.setDataset(persistDataset);
            // count() forces evaluation here, surfacing read errors inside this try block.
            sparkResult.setCount(persistDataset.count());
        } catch (Exception e) {
            // Chain the original cause (and include the path) instead of
            // printStackTrace() + a message-only rethrow, which lost the stack trace.
            throw new RuntimeException("持久化文件不存在: " + filePath, e);
        }
        return sparkResult;
    }


    /** @return the node type this handler is registered for. */
    @Override
    public NodeHandlerEnum getType() {
        return NodeHandlerEnum.READ_PERSIST_DATA;
    }
}
