package com.navinfo.tripanalysis.offline.service.impl;

import com.navinfo.platform.hbase.api.HbaseTemplate;
import com.navinfo.tripanalysis.common.arithmetic.common.OuterEventData;
import com.navinfo.tripanalysis.offline.service.SaveTripEventService;
import com.navinfo.tripanalysis.offline.service.TripEventConvertService;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

/**
 * Persists trip event data to HBase.
 *
 * <p>Supports two save strategies selected by {@code saveType}:
 * <ul>
 *   <li>{@link #SAVE_TYPE_1}: per-partition batched puts through {@code HbaseTemplate};</li>
 *   <li>{@link #SAVE_TYPE_2}: bulk write via Spark's {@code saveAsNewAPIHadoopDataset}
 *       using {@code TableOutputFormat}.</li>
 * </ul>
 *
 * <p>NOTE(review): the Spark closures below capture {@code this} (through
 * {@code getQuorum()} / {@code convertService}), so this class and its
 * {@code TripEventConvertService} must be serializable for the executors —
 * presumably guaranteed elsewhere; confirm.
 *
 * @author ws
 */
public class SaveTripEventServiceHBaseImpl implements SaveTripEventService {
    private static final Logger logger = LoggerFactory.getLogger(SaveTripEventServiceHBaseImpl.class);

    /** Save strategy: per-partition batched puts through {@code HbaseTemplate}. */
    public static final String SAVE_TYPE_1 = "type1";
    /** Save strategy: bulk write via Spark's new-API Hadoop output format. */
    public static final String SAVE_TYPE_2 = "type2";

    /** ZooKeeper client port used by the HBase connection. */
    private String port;
    /** ZooKeeper quorum (comma-separated hosts) used by the HBase connection. */
    private String quorum;
    /** ZooKeeper znode parent path of the HBase cluster. */
    private String parent;
    /** Target HBase table name. */
    private String tableName;
    /** Selected save strategy; one of {@link #SAVE_TYPE_1} / {@link #SAVE_TYPE_2}. */
    private String saveType;
    /** Converts an {@code OuterEventData} into an HBase {@code Put}; set once, never reassigned. */
    private final TripEventConvertService convertService;

    public SaveTripEventServiceHBaseImpl(TripEventConvertService convertService) {
        this.convertService = convertService;
    }

    public String getTableName() {
        return tableName;
    }
    public void setTableName(String tableName) {
        this.tableName = tableName;
    }
    public String getQuorum() {
        return quorum;
    }
    public void setQuorum(String quorum) {
        this.quorum = quorum;
    }
    public String getPort() {
        return port;
    }
    public void setPort(String port) {
        this.port = port;
    }
    public String getParent() {
        return parent;
    }
    public void setParent(String parent) {
        this.parent = parent;
    }
    public String getSaveType() {
        return saveType;
    }
    public void setSaveType(String saveType) {
        this.saveType = saveType;
    }

    /**
     * Builds an HBase client {@link Configuration} from the configured
     * ZooKeeper connection settings (quorum, client port, znode parent).
     */
    private Configuration createHBaseConfiguration() {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", getQuorum());
        configuration.set("hbase.zookeeper.property.clientPort", getPort());
        configuration.set("zookeeper.znode.parent", getParent());
        return configuration;
    }

    /**
     * Saves the event RDD to HBase using the strategy selected by {@code saveType}.
     * An unsupported {@code saveType} is logged as an error and nothing is written.
     *
     * @param spark    active Spark session (unused here, kept for the interface contract)
     * @param jsc      Java Spark context (unused here, kept for the interface contract)
     * @param eventRDD events keyed by trip id; each value is the list of events for that trip
     * @param dateTime processing timestamp (unused here, kept for the interface contract)
     */
    @Override
    public void save(SparkSession spark, JavaSparkContext jsc, JavaPairRDD<Long, List<OuterEventData>> eventRDD, long dateTime) {
        long start = System.currentTimeMillis();
        // Routine progress message: was logged at ERROR level, demoted to INFO.
        logger.info("进行行程事件数据落盘，保存HBase开始...saveType:{}", getSaveType());

        if (StringUtils.equalsIgnoreCase(SAVE_TYPE_1, getSaveType())) {
            // Strategy 1: open one HbaseTemplate per partition and batch-put each trip's events.
            eventRDD.foreachPartition((VoidFunction<Iterator<Tuple2<Long, List<OuterEventData>>>>) tuple2Iterator -> {
                HbaseTemplate hbaseTemplate = new HbaseTemplate(createHBaseConfiguration());

                while (tuple2Iterator.hasNext()) {
                    List<OuterEventData> eventList = tuple2Iterator.next()._2;
                    List<Mutation> saveOrUpdates = new ArrayList<>();

                    for (OuterEventData eventData : eventList) {
                        if (eventData != null) {
                            saveOrUpdates.add(convertService.toPut(eventData));
                        }
                    }
                    hbaseTemplate.saveOrUpdates(getTableName(), saveOrUpdates);
                    logger.info("事件数量：{}", saveOrUpdates.size());
                }
            });
        } else if (StringUtils.equalsIgnoreCase(SAVE_TYPE_2, getSaveType())) {
            // Strategy 2: flatten every event to (row-key placeholder, Put) and let Spark
            // write the whole RDD through TableOutputFormat in one job.
            JavaPairRDD<ImmutableBytesWritable, Put> hbasePuts = eventRDD.flatMapToPair((PairFlatMapFunction<Tuple2<Long, List<OuterEventData>>, ImmutableBytesWritable, Put>) longListTuple2
                    -> longListTuple2._2.stream().map(m -> new Tuple2<>(new ImmutableBytesWritable(), convertService.toPut(m))).iterator());

            try {
                Job newAPIJob = Job.getInstance(createHBaseConfiguration());
                newAPIJob.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, getTableName());
                newAPIJob.setOutputFormatClass(TableOutputFormat.class);

                hbasePuts.saveAsNewAPIHadoopDataset(newAPIJob.getConfiguration());
            } catch (IOException e) {
                // Log with the full cause; printStackTrace() removed (duplicate, uncontrolled stderr output).
                logger.error("使用Hbase新API保存失败", e);
            }
        } else {
            logger.error("不支持的保存类型saveType:{}", getSaveType());
        }

        logger.info("进行行程事件数据落盘，保存HBase耗时{}ms", (System.currentTimeMillis() - start));
    }
}
