package com.navinfo.tripanalysis.offline.service.impl;

import com.navinfo.tripanalysis.common.util.DateUtils;
import com.navinfo.tripanalysis.offline.pojo.*;
import com.navinfo.tripanalysis.offline.service.SaveDataCleaning0200DetailService;
import com.navinfo.tripanalysis.offline.util.BigDataUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.hamcrest.internal.ArrayIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import java.io.Serializable;
import java.util.*;

/**
 * 数据清洗：0200明细数据落盘
 * Data cleaning: persists 0200 (location-report) anomaly detail records into a
 * day-partitioned Hive table, using the save/partition plumbing inherited from
 * {@code AbstractHiveSaveService}.
 */
public class SaveDataCleaning0200DetailServiceImpl extends AbstractHiveSaveService implements Serializable, SaveDataCleaning0200DetailService {

    // NOTE(review): progress messages below are logged at ERROR level — presumably
    // deliberate so they survive restrictive log configurations; confirm, or
    // downgrade to INFO. Also: the org.hamcrest.internal.ArrayIterator import at
    // the top of this file is unused test-library internals — safe to remove.
    static final Logger logger = LoggerFactory.getLogger(SaveDataCleaning0200DetailServiceImpl.class);

    /**
     * Persists the cleaned 0200 detail records for one day into Hive.
     *
     * @param spark   active Spark session
     * @param pairRDD (terminal id -> detail record) pairs; entries with a null value are skipped
     * @param d       epoch millis identifying the day partition to write
     */
    @Override
    public void save(SparkSession spark, JavaPairRDD<Long, Abnormal0200DataDetail> pairRDD, long d) {
        long start = System.currentTimeMillis();
        logger.error("进行0200数据清洗明细信息落盘，保存Hive开始...");

        String day = DateUtils.format(new Date(d), DateUtils.DateFormat.YYYYMMDD);
        String outPath = hdfsPath + "/day=" + day;

        // Write into the day=<yyyyMMdd> HDFS partition (the boolean flag is the
        // inherited saveData overwrite/append switch — here false).
        saveData(spark, pairRDD, outPath, false, Collections.singletonList("day"), Collections.singletonList(day));
        logger.error("进行0200数据清洗明细信息落盘，保存Hive数据结束，耗{}ms", System.currentTimeMillis() - start);
    }

    /**
     * Drops the Hive partition for the given day, so a re-run of {@link #save}
     * does not duplicate that day's data.
     *
     * @param spark active Spark session
     * @param d     epoch millis identifying the day partition to drop
     */
    @Override
    public void dropPartitions(SparkSession spark, long d) {
        long start = System.currentTimeMillis();
        logger.error("删除0200数据清洗明细信息当日分区开始...");

        String day = DateUtils.format(new Date(d), DateUtils.DateFormat.YYYYMMDD);

        // Drop the current day's partition (day=<yyyyMMdd>) from the Hive table.
        BigDataUtils.dropHiveTablePartition(spark, hiveDbName, hiveTableName, Collections.singletonList("day"), Collections.singletonList(day));
        logger.error("删除0200数据清洗明细信息当日分区结束，耗{}ms", System.currentTimeMillis() - start);
    }

    /**
     * Builds one {@link Row} from a detail record.
     * Field order here MUST stay in sync with {@link #create0200Schema()}.
     *
     * @param m      source detail record (must not be null)
     * @param schema row schema, as produced by {@link #create0200Schema()}
     * @return a schema-attached row ready for DataFrame creation
     */
    public static Row createRow(Abnormal0200DataDetail m, StructType schema) {
        // Presized to the exact field count of the schema (10 columns).
        List<Object> list = new ArrayList<>(10);
        list.add(m.getTid());
        list.add(m.getPoint_count());
        list.add(m.getRepeated_gpstime_count());
        list.add(m.getMileage_avgspeedRange_150_200_count());
        list.add(m.getMileage_avgspeedRange_200_500_count());
        list.add(m.getMileage_avgspeedRange_500_count());
        list.add(m.getFuelRange_150_200_count());
        list.add(m.getFuelRange_200_500_count());
        list.add(m.getFuelRange_500_count());
        list.add(m.getLat_lon_Range_count());
        return new GenericRowWithSchema(list.toArray(), schema);
    }

    /**
     * Defines the 10-column schema of the 0200 detail Hive table.
     * Column order MUST stay in sync with {@link #createRow}.
     *
     * @return the row schema (all columns nullable)
     */
    protected static StructType create0200Schema() {
        List<StructField> list = new ArrayList<>(10);
        list.add(new StructField("tid", DataTypes.LongType, true, Metadata.empty()));
        list.add(new StructField("point_count", DataTypes.IntegerType, true, Metadata.empty()));
        list.add(new StructField("repeated_gpstime_count", DataTypes.IntegerType, true, Metadata.empty()));
        list.add(new StructField("mileage_avgspeedRange_150_200_count", DataTypes.IntegerType, true, Metadata.empty()));
        list.add(new StructField("mileage_avgspeedRange_200_500_count", DataTypes.IntegerType, true, Metadata.empty()));
        list.add(new StructField("mileage_avgspeedRange_500_count", DataTypes.IntegerType, true, Metadata.empty()));
        list.add(new StructField("fuelRange_150_200_count", DataTypes.IntegerType, true, Metadata.empty()));
        list.add(new StructField("fuelRange_200_500_count", DataTypes.IntegerType, true, Metadata.empty()));
        list.add(new StructField("fuelRange_500_count", DataTypes.IntegerType, true, Metadata.empty()));
        list.add(new StructField("lat_lon_Range_count", DataTypes.IntegerType, true, Metadata.empty()));

        return new StructType(list.toArray(new StructField[0]));
    }

    /** {@inheritDoc} */
    @Override
    protected StructType createSchema() {
        return create0200Schema();
    }

    /**
     * Maps (tid, detail) pairs to schema-attached {@link Row}s, dropping pairs
     * whose value is null.
     *
     * @param resourceRDD raw (Long, Abnormal0200DataDetail) pair RDD (raw type
     *                    dictated by the parent class signature)
     * @return RDD of rows conforming to {@link #create0200Schema()}
     */
    @Override
    protected JavaRDD<Row> createRowRDD(JavaPairRDD resourceRDD) {
        // Build the schema ONCE on the driver and capture it in the closure
        // (StructType is serializable), instead of reconstructing the full
        // 10-field schema for every single record inside map().
        final StructType schema = createSchema();
        return resourceRDD.filter((Function<Tuple2<Long, Abnormal0200DataDetail>, Boolean>) t -> t._2 != null)
                .map((Function<Tuple2<Long, Abnormal0200DataDetail>, Row>) e -> createRow(e._2, schema));
    }
}
