package com.navinfo.platform.dataanalysis.service.impl;

import com.navinfo.platform.common.service.IDataHandleService;
import com.navinfo.platform.common.utils.StatisticsDef;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.util.List;
import java.util.Map;

/**
 * Builds weekly or monthly fault statistics from per-day fault statistics and
 * persists each resulting dataset to its Hive-backed parquet location.
 *
 * @author web 20181203
 */
public class FaultDataHandleService implements IDataHandleService {

    /**
     * Writes the three pre-computed fault statistics datasets (vehicle-type,
     * engine-type, and fault-dimension) to their week or month parquet paths.
     *
     * @param obj       expected to hold exactly three {@code Dataset} instances:
     *                  index 0 = vehicle-type stats, index 1 = engine-type stats,
     *                  index 2 = fault-dimension stats
     *                  — NOTE(review): order is assumed from the original code; confirm with the caller
     * @param configMap job configuration; reads {@code args_1} (the day used to
     *                  fill the path template via {@link String#format}),
     *                  {@code args_2} (period type, compared against
     *                  {@link StatisticsDef#WEEK}), and the six
     *                  {@code hive.fault.*.{week,month}.path} template keys
     * @param spark     active session (unused here; required by the interface)
     */
    @Override
    public void handle(List<Object> obj, Map<String, String> configMap, SparkSession spark) {
        String day = configMap.get("args_1");
        String type = configMap.get("args_2");
        // Hoisted: the same week/month decision applies to all three outputs.
        boolean weekly = StatisticsDef.WEEK.equals(type);

        writeStats((Dataset<?>) obj.get(0),
                configMap.get(weekly ? "hive.fault.car.week.path" : "hive.fault.car.month.path"),
                day, type, "存入车辆类型故障统计信息");

        writeStats((Dataset<?>) obj.get(1),
                configMap.get(weekly ? "hive.fault.engine.week.path" : "hive.fault.engine.month.path"),
                day, type, "存入发动机类型故障统计信息");

        writeStats((Dataset<?>) obj.get(2),
                configMap.get(weekly ? "hive.fault.times.week.path" : "hive.fault.times.month.path"),
                day, type, "存入故障维度统计信息");
    }

    /**
     * Collapses one dataset to a single partition (one parquet file) and
     * overwrites it at the day-formatted path.
     *
     * @param dataset      the statistics to persist
     * @param pathTemplate {@code String.format} template containing the day placeholder
     * @param day          value substituted into {@code pathTemplate}
     * @param type         period type, logged for traceability
     * @param label        log prefix identifying which statistic is being written
     */
    private void writeStats(Dataset<?> dataset, String pathTemplate, String day, String type, String label) {
        // Log line kept byte-identical to the original output format.
        System.out.println(label + "，day：" + day + "，type：" + type + "，path：" + pathTemplate);
        dataset.repartition(1).write().mode(SaveMode.Overwrite).parquet(String.format(pathTemplate, day));
    }
}
