package com.navinfo.opentsp.platform.computing.analysis.service;

import com.navinfo.opentsp.platform.computing.analysis.entity.cleanAsh.AshLoadingsInfo;
import com.navinfo.opentsp.platform.computing.analysis.entity.cleanAsh.CleanAshEvent;
import com.navinfo.opentsp.platform.computing.analysis.util.BigDataUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.util.*;

/**
 * DPF clean-ash event<br/>
 * Data-persistence service: converts typed RDDs into {@link Dataset}s with an
 * explicit schema and writes them to the configured Hive tables via
 * {@link BigDataUtils#saveHiveTable}.
 */
public class CleanAshSaveService implements Serializable {
    private static final Logger logger = LoggerFactory.getLogger(CleanAshSaveService.class);
    /** Target Hive database name. */
    private final String dbName;
    /** Table receiving ash-loading info rows (non-partitioned). */
    private final String infoTableName;
    /** Table receiving clean-ash event rows (partitioned by "day"). */
    private final String eventTableName;

    public CleanAshSaveService(String dbName, String infoTableName, String eventTableName) {
        this.dbName = dbName;
        this.infoTableName = infoTableName;
        this.eventTableName = eventTableName;
    }

    /**
     * Persists ash-loading info records to the info table.
     *
     * @param spark       active Spark session
     * @param saveDataRdd info records to persist
     */
    public void save(SparkSession spark, JavaRDD<AshLoadingsInfo> saveDataRdd) {
        final StructType schema = Info.createSchema();
        // Wrap each partition's iterator lazily so Rows are built on demand
        // rather than materializing a whole partition up front.
        JavaRDD<Row> rows = saveDataRdd.mapPartitions((FlatMapFunction<Iterator<AshLoadingsInfo>, Row>) iterator ->
                new Iterator<Row>() {
                    @Override
                    public boolean hasNext() {
                        return iterator.hasNext();
                    }

                    @Override
                    public Row next() {
                        return Info.createRow(iterator.next(), schema);
                    }
                });
        Dataset<Row> dataSet = spark.createDataFrame(rows, schema);
        // Fixed: this informational row count was logged at ERROR level with
        // string concatenation; use parameterized INFO logging instead.
        // NOTE(review): count() launches an extra Spark job solely for this
        // log line — consider caching dataSet or dropping the count.
        logger.info("save info:{}", dataSet.count());
        BigDataUtils.saveHiveTable(spark, dataSet,
                BigDataUtils.CoalesceType.repartation,
                1,
                this.dbName,
                this.infoTableName,
                null,   // no partition columns
                null,   // no partition values
                Arrays.asList(schema.fieldNames())
        );
    }

    /**
     * Persists clean-ash event records to the event table under the given
     * "day" partition.
     *
     * @param spark       active Spark session
     * @param saveDataRdd event records to persist
     * @param day         value for the "day" Hive partition column
     */
    public void save(SparkSession spark, JavaRDD<CleanAshEvent> saveDataRdd, String day) {
        final StructType schema = Event.createSchema();
        // Same lazy per-partition Row adaptation as the info overload.
        JavaRDD<Row> rows = saveDataRdd.mapPartitions((FlatMapFunction<Iterator<CleanAshEvent>, Row>) iterator ->
                new Iterator<Row>() {
                    @Override
                    public boolean hasNext() {
                        return iterator.hasNext();
                    }

                    @Override
                    public Row next() {
                        return Event.createRow(iterator.next(), schema);
                    }
                });
        Dataset<Row> dataSet = spark.createDataFrame(rows, schema);
        // Fixed: informational count was logged at ERROR; see info overload.
        logger.info("save event:{}", dataSet.count());
        BigDataUtils.saveHiveTable(spark, dataSet,
                BigDataUtils.CoalesceType.repartation,
                1,
                this.dbName,
                this.eventTableName,
                Collections.singletonList("day"),
                Collections.singletonList(day),
                Arrays.asList(schema.fieldNames())
        );
    }

    /** Schema/Row mapping for the ash-loading info table. */
    private static class Info {
        /** Builds the info-table schema: uid, tid, ash_load, update_time. */
        private static StructType createSchema() {
            List<StructField> schema = new ArrayList<>();
            schema.add(new StructField("uid", DataTypes.LongType, true, Metadata.empty()));
            schema.add(new StructField("tid", DataTypes.LongType, true, Metadata.empty()));
            schema.add(new StructField("ash_load", DataTypes.StringType, true, Metadata.empty()));
            schema.add(new StructField("update_time", DataTypes.LongType, true, Metadata.empty()));
            return new StructType(schema.toArray(new StructField[0]));
        }

        /**
         * Converts one entity into a Row matching {@link #createSchema()};
         * update_time is stamped with the current wall-clock millis.
         */
        private static Row createRow(AshLoadingsInfo ashLoadingsInfo, StructType schema) {
            List<Object> list = new ArrayList<>(4);
            // NOTE(review): both "uid" and "tid" are populated from getTid().
            // If AshLoadingsInfo exposes a separate uid accessor, the first
            // add is likely a copy-paste bug — confirm against the entity.
            list.add(ashLoadingsInfo.getTid());
            list.add(ashLoadingsInfo.getTid());
            list.add(ashLoadingsInfo.getAshLoads());
            list.add(System.currentTimeMillis());
            return new GenericRowWithSchema(list.toArray(), schema);
        }
    }

    /** Schema/Row mapping for the clean-ash event table. */
    private static class Event {
        /**
         * Builds the event-table schema: uid, tid, start/end time,
         * engine running time, total mileage, ash loading before/after.
         */
        private static StructType createSchema() {
            List<StructField> schema = new ArrayList<>();
            schema.add(new StructField("uid", DataTypes.LongType, true, Metadata.empty()));
            schema.add(new StructField("tid", DataTypes.LongType, true, Metadata.empty()));
            schema.add(new StructField("start_time", DataTypes.LongType, true, Metadata.empty()));
            schema.add(new StructField("end_time", DataTypes.LongType, true, Metadata.empty()));
            schema.add(new StructField("engine_running_time", DataTypes.DoubleType, true, Metadata.empty()));
            schema.add(new StructField("total_mileage", DataTypes.DoubleType, true, Metadata.empty()));
            schema.add(new StructField("ash_loading_bef", DataTypes.IntegerType, true, Metadata.empty()));
            schema.add(new StructField("ash_loading_aft", DataTypes.IntegerType, true, Metadata.empty()));
            return new StructType(schema.toArray(new StructField[0]));
        }

        /** Converts one event entity into a Row matching {@link #createSchema()}. */
        private static Row createRow(CleanAshEvent cleanAshEvent, StructType schema) {
            List<Object> list = new ArrayList<>(8);
            // NOTE(review): "uid" is populated from getTid(), same as "tid" —
            // likely a copy-paste bug; confirm against CleanAshEvent.
            list.add(cleanAshEvent.getTid());
            list.add(cleanAshEvent.getTid());
            list.add(cleanAshEvent.getStartTime());
            list.add(cleanAshEvent.getEndTime());
            list.add(cleanAshEvent.getEngineRunningTime());
            list.add(cleanAshEvent.getTotalMileage());
            list.add(cleanAshEvent.getAshLoadingBef());
            list.add(cleanAshEvent.getAshLoadingAft());
            return new GenericRowWithSchema(list.toArray(), schema);
        }
    }
}
