package com.navinfo.platform.basicdatastatistics.service.impl;


import com.navinfo.platform.basicdatastatistics.dto.AbstractBasicDataDto;
import com.navinfo.platform.basicdatastatistics.enums.DataTypeEnum;
import com.navinfo.platform.basicdatastatistics.service.IDataStatisticsService;
import com.navinfo.platform.basicdatastatistics.utils.ServiceRegister;
import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.IDataHandleService;
import com.navinfo.platform.common.utils.DateUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructType;
import scala.Tuple3;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;


/**
 * Template base class for basic-data handling jobs. Subclasses supply the concrete
 * data type, day-granularity and table-name variable via {@link #getDataType()};
 * this class drives the shared pipeline: convert the incoming {@code Dataset<Row>}
 * partition-by-partition into statistics rows, rebuild a {@code DataFrame} with the
 * service-provided schema, and persist it through {@code BasicDataService.saveData}.
 */
public abstract class DefaultBasicDataHandleService implements IDataHandleService {
    /** Day-granularity keys used by subclasses when building the {@code Tuple3} from {@link #getDataType()}. */
    public final static String DAY = "day";
    public final static String WEEK = "week";
    public final static String MONTH = "month";

    /**
     * Runs the statistics pipeline for one dataset.
     *
     * @param obj       job inputs; element 0 is expected to be castable to {@code Dataset<Row>}
     *                  (via the inherited/contracted {@code castValue} helper)
     * @param configMap job configuration; must contain {@code ICommonService.DAY_VALUE}
     * @param spark     active Spark session used both for RDD work and DataFrame creation
     */
    @Override
    public void handle(List<Object> obj, Map<String, String> configMap, SparkSession spark) {
        JavaSparkContext context = new JavaSparkContext(spark.sparkContext());
        // Broadcast the config so executors can look up the statistics service without
        // serializing the whole map per task.
        final Broadcast<Map<String, String>> configBroadcast = context.broadcast(configMap);
        Dataset<Row> basicDataset = castValue(obj.get(0));
        Tuple3<DataTypeEnum, String, String> dataTypeEnumString = getDataType();
        DataTypeEnum dataType = dataTypeEnumString._1();
        String dayType = dataTypeEnumString._2();
        String tableNameVar = dataTypeEnumString._3();
        // Resolve the config key of the target table, e.g. "hdfs.vehicle.<var>.save.tableName".
        String tableNameKey = "hdfs.vehicle.%s.save.tableName";
        tableNameKey = String.format(tableNameKey, tableNameVar);
        // First element of the (start, end) tuple is the partition day used for both
        // row stamping and persistence.
        String day = DateUtils.getStartEndDay(configMap.get(ICommonService.DAY_VALUE), dayType)._1;
        JavaRDD<Row> saveDataRDD = basicDataset.toJavaRDD().mapPartitions(rowToBasicDataDto(dataType, day, configBroadcast));
        IDataStatisticsService dataStatisticsService = ServiceRegister.getInstance(dataType, configMap, IDataStatisticsService.class);
        StructType schema = dataStatisticsService.createSchema();
        Dataset<Row> saveDataset = spark.createDataFrame(saveDataRDD, schema);
        BasicDataService.saveData(spark, saveDataset, configMap, tableNameKey, day, schema);
    }

    /**
     * Builds a partition-level mapper that converts raw {@code Row}s into statistics rows.
     * The statistics service is resolved once per partition (not per row) from the
     * broadcast configuration, which is why this is a {@code mapPartitions} function.
     *
     * @param dataType        discriminator used to look up the concrete statistics service
     * @param day             day stamp (yyyyMMdd-style numeric string) written onto every DTO
     * @param configBroadcast broadcast job configuration consumed on the executors
     * @return a serializable flat-map function producing schema-conformant rows
     */
    @SuppressWarnings("unchecked") // NOTE(review): presumably ServiceRegister.getInstance performs an unchecked cast — confirm
    public static FlatMapFunction<Iterator<Row>, Row> rowToBasicDataDto(final DataTypeEnum dataType, final String day, final Broadcast<Map<String, String>> configBroadcast) {
        return rowIterator -> {
            List<Row> basicDataDtos = new ArrayList<>();
            IDataStatisticsService dataStatisticsService = ServiceRegister.getInstance(dataType, configBroadcast.getValue(), IDataStatisticsService.class);
            StructType structType = dataStatisticsService.createSchema();
            while (rowIterator.hasNext()) {
                Row row = rowIterator.next();
                AbstractBasicDataDto basicDataDto = dataStatisticsService.convertDataset(row);
                // Stamp every record with the processing day so downstream partitioning works.
                basicDataDto.setDay(Integer.parseInt(day));
                basicDataDtos.add(dataStatisticsService.createRow(basicDataDto, structType));
            }
            return basicDataDtos.iterator();
        };
    }

    /**
     * Supplies the subclass-specific pipeline parameters.
     *
     * @return tuple of (data type discriminator, day granularity — one of {@link #DAY},
     *         {@link #WEEK}, {@link #MONTH} — and the table-name variable substituted
     *         into the save-table config key)
     */
    public abstract Tuple3<DataTypeEnum, String, String> getDataType();
}
