package com.navinfo.tripanalysis.service.impl;

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.spark.MongoConnector;
import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.WriteConfig;
import com.navinfo.tripanalysis.common.arithmetic.common.OuterStatisticData;
import com.navinfo.tripanalysis.common.arithmetic.convert.StatisticExtendedDataMongoConvert;
import com.navinfo.tripanalysis.common.util.DateUtils;
import com.navinfo.tripanalysis.service.SaveTripService;
import com.navinfo.tripanalysis.util.MongoUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import java.io.Serializable;
import java.util.*;

/**
 * Persists trip extended-statistics records to MongoDB.
 *
 * <p>For a given day {@code d}, {@link #save} first deletes any previously
 * written documents for that day from the target weekly collection, then
 * converts each {@link OuterStatisticData} to a BSON {@link Document} and
 * bulk-writes the result via the MongoDB Spark connector.
 *
 * <p>Serializable because instances are captured by Spark closures.
 */
public class SaveTripExtendServiceMongoImpl implements SaveTripService, Serializable {
    private static final Logger logger = LoggerFactory.getLogger(SaveTripExtendServiceMongoImpl.class);

    /** External configuration properties (injected by the caller). */
    private Properties config;
    /** Prefix prepended to the week-based suffix to form the Mongo collection name. */
    private String collectionNamePrefix;

    public Properties getConfig() {
        return config;
    }

    public void setConfig(Properties config) {
        this.config = config;
    }

    public String getCollectionNamePrefix() {
        return collectionNamePrefix;
    }

    public void setCollectionNamePrefix(String collectionNamePrefix) {
        this.collectionNamePrefix = collectionNamePrefix;
    }

    /**
     * Saves the trip extended statistics for day {@code d} to MongoDB.
     *
     * <p>Idempotent per day: existing documents matching {@code day == yyyyMMdd(d)}
     * are removed from the weekly collection before the new batch is written.
     *
     * @param spark   the active Spark session (unused here; kept for the interface contract)
     * @param jsc     Java Spark context used to build the Mongo write configuration
     * @param tripRDD pair RDD keyed by trip id, each value holding that trip's statistics
     * @param d       the day being persisted, as epoch milliseconds
     */
    @Override
    public void save(SparkSession spark, JavaSparkContext jsc, JavaPairRDD<Long, List<OuterStatisticData>> tripRDD, long d) {
        long start = System.currentTimeMillis();
        // FIX: progress/timing messages were logged at ERROR level; they are informational,
        // so log them at INFO to keep error monitoring meaningful.
        logger.info("进行行程扩展统计信息落盘，保存Mongo开始...");

        String dayStr = DateUtils.format(new Date(d), DateUtils.DateFormat.YYYYMMDD);
        // Collection is sharded by week; the suffix is derived from the timestamp.
        String collectionName = getCollectionNamePrefix() + MongoUtils.getWeekTableName(d);

        WriteConfig writeConfig = WriteConfig.create(jsc)
                .withOptions(Collections.singletonMap("collection", collectionName));

        // Delete any previously written documents for this day (re-run safety).
        MongoConnector.apply(jsc.sc()).withCollectionDo(writeConfig, Document.class, (Function<MongoCollection<Document>, Object>) collection -> {
            DeleteResult result = collection.deleteMany(Filters.eq("day", dayStr));
            logger.info("delete:{}", result);
            return result;
        });
        logger.info("进行行程扩展统计信息落盘，删除mongo旧数据耗时{}ms", System.currentTimeMillis() - start);

        // FIX: the original wrapped a hand-rolled Iterator inside mapPartitions; a plain
        // per-element map with a method reference is equivalent and far simpler.
        JavaRDD<Document> documents = tripRDD
                .flatMap((FlatMapFunction<Tuple2<Long, List<OuterStatisticData>>, OuterStatisticData>) e -> e._2().iterator())
                .map(StatisticExtendedDataMongoConvert::toDocument);

        MongoSpark.save(documents, writeConfig);
        logger.info("进行行程扩展统计信息处理，保存Mongo耗时{}ms", System.currentTimeMillis() - start);
    }
}
