package com.navinfo.tripanalysis.offline.service.impl;

import com.navinfo.tripanalysis.common.pojo.Point;
import com.navinfo.tripanalysis.common.pojo.PointProtocol;
import com.navinfo.tripanalysis.common.util.CommonUtils;
import com.navinfo.tripanalysis.offline.pojo.*;
import com.navinfo.tripanalysis.offline.service.LoadPointProtocolService;
import com.navinfo.tripanalysis.offline.service.PointProtocolConvertService;
import com.navinfo.tripanalysis.offline.util.PointUtils;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

/**
 * 通过Hive加载公共轨迹协议数据
 * @author web
 */
@Data
@Data
public class LoadPointProtocolServiceImpl extends AbstractHiveLoadService implements LoadPointProtocolService {
    /**
     * Row/column conversion strategy for the protocol table.
     */
    protected PointProtocolConvertService service;

    /**
     * Loads raw rows via the parent Hive loader, converts each row to a
     * {@link PointProtocol}, groups the points by terminal id (tid) and sorts
     * every group with {@link Point#COMPARATOR}.
     *
     * @param spark active Spark session
     * @param jsc   Java Spark context (kept for interface compatibility; unused here)
     * @param param load parameters (partitions, optional tid filter)
     * @return RDD of (tid, points-sorted-by-comparator) tuples
     */
    @Override
    public JavaRDD<Tuple2<Long, List<PointProtocol>>> load(SparkSession spark, JavaSparkContext jsc, LoadDataParam param) {
        return super.loadData(spark, param)
                // Lazily convert each Row to a (tid, point) pair without materialising the partition.
                .mapPartitionsToPair((PairFlatMapFunction<Iterator<Row>, Long, PointProtocol>) rowIt -> new Iterator<Tuple2<Long, PointProtocol>>() {
                    @Override
                    public boolean hasNext() {
                        return rowIt.hasNext();
                    }

                    @Override
                    public Tuple2<Long, PointProtocol> next() {
                        PointProtocol point = service.fromRow(rowIt.next());
                        return new Tuple2<>(point.getTid(), point);
                    }
                })
                // Collect all points of one tid into a single list. Spark serializes and
                // clones the zero value per key, so mutating the accumulator lists is safe.
                .aggregateByKey(new ArrayList<>(), (Function2<List<PointProtocol>, PointProtocol, List<PointProtocol>>) (ps, p) -> {
                    ps.add(p);
                    return ps;
                }, (Function2<List<PointProtocol>, List<PointProtocol>, List<PointProtocol>>) (p1, p2) -> {
                    p1.addAll(p2);
                    return p1;
                })
                .mapPartitions(it -> new Iterator<Tuple2<Long, List<PointProtocol>>>() {
                    @Override
                    public boolean hasNext() {
                        return it.hasNext();
                    }

                    @Override
                    public Tuple2<Long, List<PointProtocol>> next() {
                        Tuple2<Long, List<PointProtocol>> next = it.next();
                        // The aggregated list is owned by this stage, so sorting in place
                        // avoids copying every group through a stream pipeline.
                        next._2().sort(Point.COMPARATOR);
                        return next;
                    }
                });
    }

    /**
     * @return column names to select, derived from the converter's schema
     */
    @Override
    protected List<String> getSelectColumns() {
        return service.getColNamesList(service.createStructTypeList());
    }

    /**
     * Base filter shared by the Hive and HDFS paths: drops rows whose
     * tid/tripId/gpsTime are missing or '0', optionally restricted to an
     * explicit tid list.
     *
     * @param tidList tids to restrict to; ignored when null/empty
     * @return SQL predicate fragment (never null)
     */
    private String buildBaseWhereSql(List<Long> tidList) {
        return " tid!='0' and tid is not null and tripId!='0' and tripId is not null and gpsTime!='0' and gpsTime is not null "
                + (CommonUtils.isCollectionEmpty(tidList) ? "" : String.format(" and tid in(%s) ", CommonUtils.mkString(tidList, ",")));
    }

    /**
     * Builds the WHERE clause for the Hive query: optional partition/batch
     * restriction followed by the base validity filter.
     *
     * @param param load parameters carrying the batch restriction and tid list
     * @return complete WHERE clause body
     */
    @Override
    protected String getWhereSql(LoadDataParam param) {
        String batchWhere = PointUtils.getBatchWhereSql(param);

        if (StringUtils.isEmpty(batchWhere)) {
            batchWhere = "";
        } else {
            // Rename the generic partition columns to this table's actual names.
            List<String> partitionCols = service.getPartitionCols();
            if (null != partitionCols && partitionCols.size() >= 2) {
                batchWhere = batchWhere
                        .replace("part_time", partitionCols.get(0))
                        .replace("hashtid", partitionCols.get(1));
            }
            // BUGFIX: the "and" connector must be appended whenever a batch clause
            // exists, not only when the partition columns were renamed — otherwise
            // the generated SQL concatenates two predicates with no connector and
            // is syntactically invalid.
            batchWhere = batchWhere + " and ";
        }

        return batchWhere + buildBaseWhereSql(param.getTidList());
    }

    /**
     * Resolves the concrete HDFS paths for this load, renaming the generic
     * partition directory names (part_time/hashtid) to the table-specific ones.
     *
     * @param param load parameters used to enumerate partition paths
     * @return resolved paths; may be null/empty when the util returns none
     */
    @Override
    protected List<String> getHDFSRealPaths(LoadDataParam param) {
        List<String> hdfsPath = PointUtils.getHDFSPath(getHdfsPath(), param);

        if (null != hdfsPath && !hdfsPath.isEmpty()) {
            List<String> partitionCols = service.getPartitionCols();
            if (null != partitionCols && partitionCols.size() >= 2) {
                // Rewrite each path segment in place with the table-specific column names.
                for (int i = 0, size = hdfsPath.size(); i < size; i++) {
                    String tmp = hdfsPath.get(i)
                            .replace("part_time", partitionCols.get(0))
                            .replace("hashtid", partitionCols.get(1));
                    hdfsPath.set(i, tmp);
                }
            }
        }
        return hdfsPath;
    }

    /**
     * Builds the filter applied when reading directly from HDFS files; the
     * partition restriction is already encoded in the paths, so only the base
     * validity filter applies.
     *
     * @param param load parameters carrying the optional tid list
     * @return WHERE clause body
     */
    @Override
    protected String getHDFSWhereSql(LoadDataParam param) {
        return buildBaseWhereSql(param.getTidList());
    }

}
