package com.navinfo.tripanalysis.offline.service.impl;

import com.navinfo.tripanalysis.common.arithmetic.common.OuterStatisticData;
import com.navinfo.tripanalysis.common.util.DateUtils;
import com.navinfo.tripanalysis.offline.service.TripStatisticExtendService;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;

/**
 * 抽象的行程统计的扩展信息服务
 * @author: web
 * @date: 2020-05-26
 **/
public abstract class AbstractTripStatisticExtendService extends AbstractHiveSaveService implements TripStatisticExtendService {

    /**
     * Persists the extended trip-statistics pair RDD to Hive, partitioned by day.
     *
     * @param spark   active Spark session used for the write
     * @param jsc     Java Spark context (currently unused here; kept for interface compatibility)
     * @param pairRDD key/value data to persist: key presumably a vehicle/trip id, value the statistic records
     * @param d       epoch milliseconds used to derive the {@code day} partition value (yyyyMMdd)
     */
    public void save(SparkSession spark, JavaSparkContext jsc, JavaPairRDD<Long, List<OuterStatisticData>> pairRDD, long d) {
        long start = System.currentTimeMillis();
        // Routine lifecycle message — log at INFO, not ERROR, so error monitoring stays clean.
        logger.info("进行行程统计的扩展信息落盘，保存Hive开始...");

        // Derive the partition directory suffix (day=yyyyMMdd) from the supplied timestamp.
        String day = DateUtils.format(new Date(d), DateUtils.DateFormat.YYYYMMDD);
        // Partition column "day" with a quoted literal value; overwrite flag is false.
        saveData(spark, pairRDD, hdfsPath +"/day="+day, false, Collections.singletonList("day"),  Collections.singletonList("'"+day+"'"));

        logger.info("进行行程统计的扩展信息落盘，保存Hive数据结束，耗{}ms", System.currentTimeMillis()-start);
    }

    /**
     * Flattens each {@code (Long, List<OuterStatisticData>)} pair into one Hive {@link Row} per
     * statistic record via {@link #toRow}.
     *
     * <p>NOTE(review): the parameter is a raw {@code JavaPairRDD}; the declaration presumably mirrors
     * the {@code TripStatisticExtendService} interface, so the element type is fixed here only by the
     * explicit {@code FlatMapFunction} cast — confirm against the interface before adding generics.
     *
     * @param resourceRDD pair RDD whose values are lists of {@link OuterStatisticData}
     * @return an RDD of rows, one per source statistic record
     */
    @Override
    public JavaRDD<Row> createRowRDD(JavaPairRDD resourceRDD) {
        return resourceRDD.flatMap((FlatMapFunction<Tuple2<Long, List<OuterStatisticData>>, Row>) longListTuple2 -> {
            List<OuterStatisticData> srcList = longListTuple2._2;
            // Presize to avoid resizing; one output row per source record.
            List<Row> list = new ArrayList<>(srcList.size());
            for (OuterStatisticData m : srcList) {
                list.add(toRow(m, null));
            }
            return list.iterator();
        });
    }
}
