package com.navinfo.opentsp.platform.computing.analysis.service;

import com.navinfo.opentsp.platform.computing.analysis.entity.cleanAsh.AshLoadingsInfo;
import com.navinfo.opentsp.platform.computing.analysis.entity.cleanAsh.PointForCleanAsh;
import com.navinfo.opentsp.platform.computing.analysis.util.CommonUtil;
import com.navinfo.opentsp.platform.computing.analysis.util.PropertiesUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.io.Serializable;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

/**
 * DPF (diesel particulate filter) clean-ash event:<br/>
 * service that loads the required input data from the data sources (Hive tables via Spark SQL).
 */
public class CleanAshLoadDataService implements Serializable {

    /** Columns selected from the 0200 position-point table. */
    private static final List<String> COL_NAMES = Arrays.asList(
            "terminalid", "gpsdate", "status", "mileage", "enginerunningtime", "dpfashloading");

    /** Database holding the 0200 position-point table; may be empty (current database is used). */
    private final String pointDbName;
    /** Name of the 0200 position-point table. */
    private final String pointTableName;
    /** Database the previously computed ash-loading results were saved to. */
    private final String saveDbName;

    /**
     * @param pointDbName    database of the position-point table (may be null/empty)
     * @param pointTableName position-point table name
     * @param saveDbName     database containing the previously saved ash-loading data
     */
    public CleanAshLoadDataService(String pointDbName, String pointTableName, String saveDbName) {
        this.pointDbName = pointDbName;
        this.pointTableName = pointTableName;
        this.saveDbName = saveDbName;
    }

    /**
     * Loads the ash-loading arrays produced by the previous processing run.
     *
     * @param spark active Spark session
     * @return RDD of per-terminal ash-loading info (built from result columns
     *         {@code tid} and {@code ash_load})
     */
    public JavaRDD<AshLoadingsInfo> loadInfo(SparkSession spark) {
        // The HQL template is externalized in a properties file; only the
        // target database name is substituted in.
        String infoHql = String.format(
                PropertiesUtil.getProperties("cleanAsh.hive.data.hql.info"), this.saveDbName);
        return spark.sql(infoHql).toJavaRDD().map(row -> {
            AshLoadingsInfo ashLoadingsInfo = new AshLoadingsInfo();
            ashLoadingsInfo.setTid(row.getAs("tid"));
            ashLoadingsInfo.setAshLoads(row.getAs("ash_load"));
            return ashLoadingsInfo;
        });
    }

    /**
     * Reads the 0200 position points for one day and extracts the fields
     * needed for clean-ash analysis.
     *
     * @param spark active Spark session
     * @param day   partition value substituted into {@code part_time = (...)}.
     *              NOTE(review): interpolated directly into the SQL text — callers
     *              must pass a trusted, well-formed value (injection risk otherwise).
     * @return RDD of parsed position points
     */
    public JavaRDD<PointForCleanAsh> loadPoint(SparkSession spark, final String day) {
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("select %s from ", String.join(",", COL_NAMES)));
        if (StringUtils.isNotEmpty(this.pointDbName)) {
            sb.append(this.pointDbName).append(".");
        }
        sb.append(this.pointTableName);
        sb.append(String.format("\n where part_time = (%s)", day));
        sb.append(" and ").append(getWhereSql0200());

        return toJavaRdd(spark.sql(sb.toString()));
    }

    /**
     * Maps each raw row of the position-point result set to a {@link PointForCleanAsh}.
     * A plain per-element {@code map} is behaviorally identical to the original
     * hand-written {@code mapPartitions} iterator wrapper, and far simpler.
     */
    private JavaRDD<PointForCleanAsh> toJavaRdd(Dataset<Row> dataSet) {
        return dataSet.toJavaRDD().map(this::getPoint);
    }

    /**
     * Converts one raw 0200 row into a {@link PointForCleanAsh}.
     */
    private PointForCleanAsh getPoint(Row row) {
        PointForCleanAsh point = new PointForCleanAsh();
        point.setTid(row.getAs("terminalid"));
        // gpsdate * 1000 — presumably epoch seconds converted to milliseconds; confirm.
        point.setGpsTime(((long) row.getAs("gpsdate")) * 1000L);
        // Extracts bit 0 of the status word — presumably the ACC (ignition) flag; confirm
        // against CommonUtil.getToBinary.
        point.setAccStatus((int) CommonUtil.getToBinary(row.getAs("status"), 0));
        // Raw mileage / 1000 truncated to one decimal place (RoundingMode.DOWN replaces
        // the deprecated BigDecimal.ROUND_DOWN — identical truncation semantics).
        point.setGpsMileage(BigDecimal.valueOf(row.getAs("mileage")).divide(
                new BigDecimal("1000"), 1, RoundingMode.DOWN).doubleValue());
        point.setEngineRunningTime(row.getAs("enginerunningtime"));
        point.setDpfAshLoading(row.getAs("dpfashloading"));
        return point;
    }

    /**
     * Base filter for 0200 rows: drops records whose gpsdate or terminalid
     * is zero or null.
     */
    private String getWhereSql0200() {
        return "gpsdate!=0 and gpsdate is not null and terminalid!=0 and terminalid is not null ";
    }
}
