package com.navinfo.platform.demo.service.impl;

import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.IDataHandleService;
import com.navinfo.platform.common.utils.DateUtils;
import com.navinfo.platform.dataanalysis.dto.MileageOilDto;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;

import java.util.List;
import java.util.Map;

import static org.apache.spark.sql.functions.col;

/**
 * Demo data-handle service implemented as an enum singleton.
 *
 * <p>Consumes three pre-built inputs passed positionally in {@code obj} — a Spark
 * {@link Dataset}, a Mongo-backed {@link JavaMongoRDD} and a MySQL-backed
 * {@link Dataset} — previews them, and persists a shifted projection of the first
 * dataset to HDFS as parquet.
 *
 * @author gx
 */
public enum DemoService implements IDataHandleService {
    // Enum singleton: exactly one instance, initialization is thread-safe by JVM guarantee.
    INSTANCE;

    /** Offset added to every terminal id before the projection is previewed/persisted. */
    private static final int TERMINAL_ID_OFFSET = 400;

    /**
     * Target parquet location.
     * NOTE(review): hard-coded host/date path — consider supplying it via {@code configMap}.
     */
    private static final String OUTPUT_PATH = "hdfs://hd23.aerozhong.com/tmp/parquet/20181129";

    /**
     * Handles one batch of pre-loaded data sources.
     *
     * @param obj       positional inputs: [0] {@code Dataset<Row>} (main data),
     *                  [1] {@code JavaMongoRDD<Document>}, [2] MySQL-backed {@code Dataset<Row>};
     *                  callers must supply exactly this layout
     * @param configMap job configuration; {@link ICommonService#DAY_VALUE} is expected to hold
     *                  a day formatted as {@code yyyyMMdd}
     * @param spark     active Spark session (unused here; part of the service contract)
     */
    @SuppressWarnings("unchecked")
    @Override
    public void handle(List<Object> obj, Map<String, String> configMap, SparkSession spark) {
        Dataset<Row> dataset = (Dataset<Row>) obj.get(0);
        JavaMongoRDD<Document> javaMongoRDD = (JavaMongoRDD<Document>) obj.get(1);
        // NOTE(review): debug output — replace with a proper logger before production use.
        System.out.println(javaMongoRDD.take(10));

        String day = configMap.get(ICommonService.DAY_VALUE);
        Dataset<Row> mysqlData = (Dataset<Row>) obj.get(2);
        mysqlData.show();

        // Re-format the configured day (yyyyMMdd) to yyyy-MM-dd for the rows built below.
        String date = DateUtils.format(day, "yyyyMMdd", "yyyy-MM-dd");

        // Test: save data locally.
        // NOTE(review): this RDD is transformed but never materialized — no Spark action is
        // ever invoked on it, so the map below never executes. Kept to preserve existing
        // behavior; either add an action (e.g. a save) or delete this transformation.
        JavaRDD<MileageOilDto> mileageOilDtoJavaRDD = javaMongoRDD.map(d -> {
            d.put("date", date);
            return MileageOilDto.parseRow(d);
        });

        dataset.show();
        // Build the projection once and reuse it for both preview and persistence
        // (the original recomputed the identical select expression twice).
        Dataset<Row> shifted =
                dataset.select(col("terminalid").plus(TERMINAL_ID_OFFSET).as("terminalid"), col("speed"));
        shifted.show();
        shifted.write().mode(SaveMode.Overwrite).format("parquet").save(OUTPUT_PATH);
    }
}
