package com.navinfo.opentsp.platform.computing.analysis.service;

import com.navinfo.opentsp.platform.computing.analysis.entity.disconnectedVehicle.ProcessedInfo;
import com.navinfo.opentsp.platform.computing.analysis.util.BigDataUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

/**
 * 异常断连/长停断连，断连原因判断<br/>
 * 数据落盘service
 */
public class DisconnectedVehicleSaveService implements Serializable {
    private final String dbName;
    private final String tableName;

    public DisconnectedVehicleSaveService(String dbName, String tableName) {
        this.dbName = dbName;
        this.tableName = tableName;
    }

    private static StructType createSchema() {
        List<StructField> schema = new ArrayList<>();
        schema.add(new StructField("uid", DataTypes.LongType, true, Metadata.empty()));
        schema.add(new StructField("tid", DataTypes.LongType, true, Metadata.empty()));
        schema.add(new StructField("connected", DataTypes.IntegerType, true, Metadata.empty()));
        schema.add(new StructField("lack_of_electricity", DataTypes.IntegerType, true, Metadata.empty()));
        schema.add(new StructField("modified", DataTypes.IntegerType, true, Metadata.empty()));
        schema.add(new StructField("poor_signal", DataTypes.IntegerType, true, Metadata.empty()));
        schema.add(new StructField("abnormal_disconnection", DataTypes.IntegerType, true, Metadata.empty()));
        schema.add(new StructField("long_stay_disconnection", DataTypes.IntegerType, true, Metadata.empty()));
        schema.add(new StructField("alarm", DataTypes.LongType, true, Metadata.empty()));
        schema.add(new StructField("update_time", DataTypes.LongType, true, Metadata.empty()));
        return new StructType(schema.toArray(new StructField[schema.size()]));
    }

    /**
     * 创建一行数据
     */
    private static Row createRow(ProcessedInfo processedInfo, StructType schema) {
        List<Object> list = new ArrayList<>();
        list.add(processedInfo.getTid());
        list.add(processedInfo.getTid());
        list.add(processedInfo.getConnected());
        list.add(processedInfo.getLackOfElectricity());
        list.add(processedInfo.getModified());
        list.add(processedInfo.getPoorSignal());
        list.add(processedInfo.getAbnormalDisconnection());
        list.add(processedInfo.getLongStayDisconnection());
        list.add(processedInfo.getAlarm());
        list.add(System.currentTimeMillis());

        return new GenericRowWithSchema(list.toArray(), schema);
    }

    public void save(SparkSession spark, JavaRDD<ProcessedInfo> saveDataRdd) {
        final StructType schema = createSchema();
        JavaRDD<Row> rows = saveDataRdd.mapPartitions((FlatMapFunction<Iterator<ProcessedInfo>, Row>) iterator ->
                new Iterator<Row>() {
                    @Override
                    public boolean hasNext() {
                        return iterator.hasNext();
                    }

                    @Override
                    public Row next() {
                        ProcessedInfo processedInfo = iterator.next();
                        return createRow(processedInfo, schema);
                    }
                });
        Dataset<Row> dataSet = spark.createDataFrame(rows, schema);
        BigDataUtils.saveHiveTable(spark, dataSet,
                BigDataUtils.CoalesceType.repartation,
                1,
                this.dbName,
                this.tableName,
                null,
                null,
                Arrays.asList(schema.fieldNames())
        );
    }
}
