package com.navinfo.opentsp.platform.computing.analysis.application;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.ReadConfig;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import com.lc.core.protocol.common.LCAMTDataUploadOld;
import com.navinfo.opentsp.platform.computing.analysis.util.GpsTransform;
import com.navinfo.opentsp.platform.computing.analysis.util.PropertiesUtil;
import com.navinfo.opentsp.platform.location.protocol.common.LCAMTDataUpload;
import com.navinfo.opentsp.platform.location.protocol.common.LCLocationData;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;
import org.bson.types.Binary;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import scala.Tuple2;

import java.text.SimpleDateFormat;
import java.util.*;

/**
 * Dense historical data migration job: reads one day's AMT upload documents
 * from MongoDB (monthly collection {@code AmtUpload_yyyyMM}), groups the raw
 * protobuf payloads by terminal id, converts each record from the old pb
 * format to the new one, and republishes it to a Kafka topic.
 *
 * @author chen jie
 * Created by chen on 2019/7/29.
 */
public class MongoToKafkaD007 {

    /**
     * Entry point.
     *
     * @param args args[0] is the day to migrate, formatted {@code yyyyMMdd}
     */
    public static void main(String[] args) {
        if (args.length < 1) {
            System.err.println("Usage: MongoToKafkaD007 <yyyyMMdd>");
            System.exit(1);
        }
        String day = args[0];
        String mongoUriD007 = PropertiesUtil.getProperties("mongoUri_D007");
        SparkSession spark = SparkSession.builder()
                .appName("MongoToKafkaD007")
                .config("spark.mongodb.input.uri", mongoUriD007)
                .config("spark.mongodb.input.partitioner", "MongoPaginateBySizePartitioner")
                .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                .getOrCreate();
        try {
            JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
            exec(day, jsc);
        } catch (Exception e) {
            // Keep the full stack trace; printing only getMessage() hid the failure point.
            e.printStackTrace();
        } finally {
            spark.stop();
        }
    }

    /**
     * Loads one day's documents from the monthly AmtUpload collection, groups
     * the binary protobuf payloads by terminal id, and sends every payload to
     * Kafka after converting old-format pb to the new format.
     *
     * @param day day to migrate, formatted {@code yyyyMMdd}; selects both the
     *            monthly collection name and the gpsDate filter range
     * @param jsc Spark context used to build the Mongo RDD
     * @throws Exception if {@code day} cannot be parsed as yyyyMMdd (previously
     *                   this was silently swallowed, producing a filter over
     *                   epoch 0 and migrating nothing useful)
     */
    private static void exec(String day, JavaSparkContext jsc) throws Exception {
        Map<String, String> readOverrides = new HashMap<>();
        String topic = PropertiesUtil.getProperties("d007.topic");
        String kafka = PropertiesUtil.getProperties("kafka");
        // Documents are sharded into monthly collections: AmtUpload_yyyyMM.
        readOverrides.put("collection", "AmtUpload_" + day.substring(0, 6));
        ReadConfig readConfig = ReadConfig.create(jsc).withOptions(readOverrides);

        // gpsDate is stored in epoch seconds; filter [startOfDay, startOfDay + 24h].
        // A bad `day` argument now fails fast instead of being swallowed.
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMdd");
        simpleDateFormat.setLenient(false);
        Date date = simpleDateFormat.parse(day);
        long start = date.getTime() / 1000;
        long end = start + 60 * 60 * 24;
        Document filter = Document.parse("{ $match: {'gpsDate': {$gte:" + start + ",$lte:" + end + "}}}");

        JavaMongoRDD<Document> rdd = MongoSpark.load(jsc, readConfig).withPipeline(Collections.singletonList(filter));
        // NOTE: count() triggers a full extra pass over the data; kept for operational logging.
        System.out.println(day + "数据量================" + rdd.count());
        rdd.mapPartitionsToPair((PairFlatMapFunction<Iterator<Document>, String, List<Binary>>) (Iterator<Document> itor) -> {
            // Map each document to (terminalId, [content]) so payloads can be grouped per terminal.
            List<Tuple2<String, List<Binary>>> ret = new ArrayList<>();
            while (itor.hasNext()) {
                Document doc = itor.next();
                String tid = String.valueOf(doc.get("terminalId"));
                Binary data = (Binary) doc.get("content");
                ArrayList<Binary> list = new ArrayList<>();
                list.add(data);
                ret.add(Tuple2.apply(tid, list));
            }
            return ret.iterator();
        }).reduceByKey((Function2<List<Binary>, List<Binary>, List<Binary>>) (elements1, elements2) -> {
            // Mutating the left accumulator is acceptable inside reduceByKey.
            elements1.addAll(elements2);
            return elements1;
        }).foreachPartition(new VoidFunction<Iterator<Tuple2<String, List<Binary>>>>() {

            @Override
            public void call(Iterator<Tuple2<String, List<Binary>>> itor) throws Exception {
                // One producer per partition; it must be flushed and destroyed
                // (previously it was leaked, risking loss of buffered records).
                Map<String, Object> props = new HashMap<>();
                props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka);
                props.put(ProducerConfig.RETRIES_CONFIG, 0);
                props.put(ProducerConfig.BATCH_SIZE_CONFIG, 100000);
                props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
                props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
                props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
                props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "com.navinfo.opentsp.platform.computing.analysis.entity.ProtoBufSerializer");
                props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, 30000);
                DefaultKafkaProducerFactory<String, Object> producerFactory = new DefaultKafkaProducerFactory<>(props);
                KafkaTemplate<String, Object> kafkaTemplate = new KafkaTemplate<>(producerFactory);
                try {
                    while (itor.hasNext()) {
                        Tuple2<String, List<Binary>> tuple2 = itor.next();
                        String tid = tuple2._1;
                        List<Binary> dataList = tuple2._2;
                        for (Binary binary : dataList) {
                            LCAMTDataUploadOld.AMTDataUpload amtDataUploadOld;
                            LCAMTDataUpload.AMTDataUpload amtDataUpload;
                            try {
                                // Parse the same bytes with both the old and the new schema.
                                amtDataUploadOld = LCAMTDataUploadOld.AMTDataUpload.parseFrom(binary.getData());
                                amtDataUpload = LCAMTDataUpload.AMTDataUpload.parseFrom(binary.getData());
                            } catch (Exception e) {
                                // Skip undecodable records but keep the evidence.
                                e.printStackTrace();
                                continue;
                            }

                            // Old->new pb conversion: carry the old gpsDate over as the serial number.
                            LCAMTDataUpload.AMTDataUpload newAmtDataUpload = amtDataUpload.toBuilder()
                                    .setSerialNumber((int) amtDataUploadOld.getGpsDate())
                                    .build();
                            kafkaTemplate.send(topic, tid, newAmtDataUpload.toByteArray());
                        }
                    }
                    // Push out any records still sitting in the producer's buffer.
                    kafkaTemplate.flush();
                } finally {
                    producerFactory.destroy();
                }
            }
        });
    }
}
