package com.navinfo.tripanalysis.offline.service.impl;

import com.navinfo.tripanalysis.common.util.CommonUtils;
import com.navinfo.tripanalysis.common.util.DateUtils;
import com.navinfo.tripanalysis.offline.pojo.LoadDataParam;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Abstract ETL base class that loads trip statistics or trip events from a Hive
 * table (or directly from an HDFS partition directory) and groups the resulting
 * rows by trip id ("tid").
 *
 * @param <T> concrete record type that a single {@link Row} is converted into
 *            via {@link #fromRow(Row)}
 * @author 沈东生，web
 */
public abstract class AbstractLoadTripDataService<T> extends AbstractHiveLoadService{

    /**
     * Loads the rows selected by {@code param} and groups them by trip id.
     *
     * @param spark active Spark session used to execute the load
     * @param param load parameters (day partition, optional tid filter)
     * @return pair RDD keyed by tid; the value is the list of converted records
     *         belonging to that trip
     */
    public JavaPairRDD<Long, List<T>> load(SparkSession spark, LoadDataParam param) {
        return super.loadData(spark, param).mapPartitionsToPair((PairFlatMapFunction<Iterator<Row>, Long, T>) rowIterator -> new Iterator<Tuple2<Long, T>>() {
            // Lazy one-to-one wrapping of the partition iterator: rows are converted
            // on demand, so the whole partition is never materialized in memory.
            @Override
            public boolean hasNext() {
                return rowIterator.hasNext();
            }
            @Override
            public Tuple2<Long, T> next() {
                Row row = rowIterator.next();
                // NOTE(review): assumes column "tid" is present, non-null and a Long;
                // a null or differently-typed value would throw here — confirm the
                // Hive schema guarantees this.
                return new Tuple2<>((Long) row.getAs("tid"), fromRow(row));
            }
        }).combineByKey(
            // createCombiner: start an explicitly mutable list for the first value of
            // a key. The previous Stream.of(point).collect(Collectors.toList()) gave
            // no mutability guarantee (per the Collectors.toList() contract), yet the
            // merge functions below mutate the list with add/addAll.
            (Function<T, List<T>>) point -> {
                List<T> values = new ArrayList<>();
                values.add(point);
                return values;
            },
            // mergeValue: append one more value to the partition-local list.
            (Function2<List<T>, T, List<T>>) (points, p) -> {
                points.add(p);
                return points;
            },
            // mergeCombiners: concatenate lists produced by different partitions.
            (Function2<List<T>, List<T>, List<T>>) (p1, p2) -> {
                p1.addAll(p2);
                return p1;
            });
    }

    /**
     * Selects all columns by default; subclasses may override to narrow the
     * projection. Returned list is mutable on purpose (callers may append).
     */
    @Override
    protected List<String> getSelectColumns() {
        return new ArrayList<>(Arrays.asList("*"));
    }

    /**
     * Builds the Hive WHERE clause: always filters on the day partition and,
     * when a tid list is supplied, additionally restricts to those trip ids.
     * tid values are numeric (Long), so the in(...) interpolation cannot carry
     * SQL metacharacters.
     */
    @Override
    protected String getWhereSql(LoadDataParam param) {
        String dayStr = DateUtils.format(param.getDay(), DateUtils.DateFormat.YYYYMMDD);
        StringBuilder sb = new StringBuilder(String.format(" day='%s'", dayStr));

        List<Long> tidList = param.getTidList();
        if (CommonUtils.isCollectionNotEmpty(tidList)) {
            sb.append(String.format(" and tid in(%s) ", CommonUtils.mkString(tidList, ",")));
        }

        return sb.toString();
    }

    /**
     * Resolves the concrete HDFS partition directory for the requested day.
     * Returns an empty list (never null) when the partition does not exist or
     * the lookup fails, so callers can treat either case as "no data".
     */
    @Override
    protected List<String> getHDFSRealPaths(LoadDataParam param) {
        List<String> path = new ArrayList<>();

        try {
            String dataPathStr = String.format("%s/day=%s", getHdfsPath(), DateUtils.format(param.getDay(), DateUtils.DateFormat.YYYYMMDD));
            // FileSystem.get returns a process-wide cached instance keyed by URI, so
            // it must NOT be closed here — closing would break other users sharing it.
            FileSystem fs = FileSystem.get(new java.net.URI(getHdfsPath()), new org.apache.hadoop.conf.Configuration());
            if (fs.exists(new Path(dataPathStr))) {
                path.add(dataPathStr);
            } else {
                logger.info("ETL通过HDFS加载数据，dataPathStr:{} is not exist", dataPathStr);
            }
        } catch (Exception e) {
            // Deliberate best-effort: log and fall through to the empty path list.
            logger.error("ETL通过HDFS加载数据失败,{}", e.getMessage(), e);
        }

        return path;
    }

    /**
     * Builds the filter clause applied when data is loaded from HDFS instead of
     * Hive: restricts to the requested trip ids, or "1=1" (match everything)
     * when no tid filter was supplied.
     */
    @Override
    protected String getHDFSWhereSql(LoadDataParam param){
        List<Long> tidList = param.getTidList();
        return CommonUtils.isCollectionNotEmpty(tidList) ? String.format(" tid in(%s) ", CommonUtils.mkString(tidList, ",")) : "1=1";
    }

    /**
     * Converts a single result {@link Row} into the concrete record type.
     *
     * @param row source row read from Hive/HDFS
     * @return the converted record
     */
    protected abstract T fromRow(Row row);
}
