package com.navinfo.tripanalysis.offline.service.impl;

import com.navinfo.tripanalysis.common.pojo.Point0F39;
import com.navinfo.tripanalysis.common.util.DateUtils;
import com.navinfo.tripanalysis.offline.service.SavePointFaultService;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructType;
import scala.Tuple2;

import java.util.Collections;
import java.util.Date;
import java.util.Iterator;

/**
 * Fault-point persistence service implementation: writes fault points
 * ({@link Point0F39}) into Hive, partitioned by day.
 *
 * @author hmc
 */
public class SavePointFaultServiceImpl extends AbstractHiveSaveService implements SavePointFaultService {

    /**
     * Persists the fault-point pair RDD to Hive under a day partition.
     *
     * @param spark       active Spark session used for the write
     * @param pair0F39RDD (key, fault point) pairs to persist
     * @param day         epoch milliseconds identifying the partition day
     */
    @Override
    public void save(SparkSession spark, JavaPairRDD<String, Point0F39> pair0F39RDD, long day) {
        long start = System.currentTimeMillis();
        // These are informational progress messages, not failures:
        // ERROR level was a log-level misuse; INFO is the correct severity.
        logger.info("进行故障数据落盘，保存Hive开始...");

        // Partition directory is keyed by the formatted day (yyyyMMdd).
        String saveDay = DateUtils.format(new Date(day), DateUtils.DateFormat.YYYYMMDD);
        super.saveData(spark, pair0F39RDD, hdfsPath + "/day=" + saveDay, false,
                Collections.singletonList("part_time"),
                Collections.singletonList("'" + saveDay + "'"));

        logger.info("进行故障数据落盘，保存Hive数据结束，耗{}ms", System.currentTimeMillis() - start);
    }

    /** @return the Spark SQL schema for fault-point rows. */
    @Override
    protected StructType createSchema() {
        return Point0F39.createSchema();
    }

    /**
     * Converts the (key, {@link Point0F39}) pair RDD into a Row RDD matching
     * {@link #createSchema()}. Uses a lazy iterator so rows are built
     * on demand, one partition at a time.
     *
     * @param resourceRDD raw pair RDD (kept raw to match the parent's
     *                    signature — TODO confirm against AbstractHiveSaveService)
     * @return rows built from each pair's Point0F39 value
     */
    @Override
    protected JavaRDD<Row> createRowRDD(JavaPairRDD resourceRDD) {
        return resourceRDD.mapPartitions(
                (FlatMapFunction<Iterator<Tuple2<String, Point0F39>>, Row>) it ->
                        new Iterator<Row>() {
                            @Override
                            public boolean hasNext() {
                                return it.hasNext();
                            }

                            @Override
                            public Row next() {
                                // The key is ignored; only the fault point becomes a Row.
                                return Point0F39.createRow(it.next()._2);
                            }
                        });
    }
}
