package com.yomob.kylin;

import java.time.LocalDate;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.yomob.kylin.facility.Consts;
import com.yomob.kylin.service.*;
import com.yomob.kylin.support.Metric;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.storage.StorageLevel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoDatabase;
import com.yomob.kylin.client.MongoDBClient;
import com.yomob.kylin.client.SparkClient;
import com.yomob.kylin.support.OptionSupport;

import static org.apache.spark.sql.functions.col;

/**
 * Spark job entry class: loads raw event data from HDFS, filters it down to the
 * counter types the metric services consume, caches the result, and runs each
 * requested metric service, which writes its aggregates to MongoDB.
 */
public class Application {
    private static final Logger LOGGER = LoggerFactory.getLogger(Application.class);

    /**
     * Counter-id values kept from the raw data; all other rows are dropped
     * before caching. (Renamed from {@code counterids} to follow constant naming.)
     */
    private static final Object[] COUNTER_IDS = {
            "active", "yomob_payment", "ad_adclick", "cp_adclick", "newplayer_backend"
    };

    /**
     * Job entry point. Required options (parsed by {@link OptionSupport}):
     * <ul>
     *   <li>{@code --mongoDB=testdb} &mdash; MongoDB database name</li>
     *   <li>{@code --hdfsPath=/alldata} &mdash; dataset root on HDFS; sub-directories
     *       under it are named by date, e.g. {@code 20170901}, {@code 20181211}</li>
     *   <li>{@code --startDate=2017-11-18} / {@code --endDate=2017-11-18} &mdash; date scope</li>
     *   <li>{@code --metrics=new_user,ad_clicks,payment,active_user} &mdash; metrics to compute</li>
     *   <li>{@code --storageLevel=MEMORY_ONLY} &mdash; optional Spark dataset cache level</li>
     * </ul>
     *
     * @param args command-line options, see above
     * @throws Exception if option parsing fails, the dataset is empty, or a service fails
     */
    public static void main(String[] args) throws Exception {

        // Parse arguments
        OptionSupport.parse(args);
        // Initialize logger config before anything else touches the log backend
        SparkSession.builder().initializeLogIfNecessary(true);

        LOGGER.info(">>> Options: {}", OptionSupport.getOptions());
        // Create spark session
        SparkSession sparkSession = SparkSession.builder().getOrCreate();
        // try-with-resources: the Mongo client was previously never closed (connection leak)
        try (MongoClient mongoClient = new MongoClient(new MongoClientURI(OptionSupport.getMongoUri()))) {
            MongoDatabase mongoDatabase = mongoClient.getDatabase(OptionSupport.getMongoDb());
            LocalDate startDate = OptionSupport.getStartDate();
            LocalDate endDate = OptionSupport.getEndDate();
            List<Metric> metrics = OptionSupport.getMetrics();

            // init spark/mongo clients
            SparkClient sparkClient = new SparkClient(sparkSession);
            MongoDBClient mongoDBClient = new MongoDBClient(mongoDatabase);

            // One service per supported metric; each writes its result to Mongo.
            Map<Metric, SimpleService> services = new HashMap<>();
            services.put(Metric.ACTIVE_USER, new ActiveService(mongoDBClient));
            services.put(Metric.AD_CLICKS, new AdClickedService(mongoDBClient));
            services.put(Metric.NEW_USER, new NewUserService(mongoDBClient));
            services.put(Metric.PAYMENT, new PaymentService(mongoDBClient));

            LOGGER.info("Loading data...");
            Dataset<Row> input = sparkClient.getDataset(startDate, endDate);
            if (input == null) {
                // Was `new Exception(...)`: a specific unchecked type with context is clearer.
                throw new IllegalStateException(
                        "Empty dataset! startDate=" + startDate + ", endDate=" + endDate);
            }
            LOGGER.info("Filter data...");
            Dataset<Row> filtered = input.filter(col(Consts.Fields.COUNTERID).isin(COUNTER_IDS));
            LOGGER.info("Caching data...");
            filtered.persist(StorageLevel.fromString(OptionSupport.getStorageLevel().name()));
            LOGGER.info(">>> Execute service start. metrics={}, startDate={}, endDate={}.",
                    metrics, startDate, endDate);
            try {
                for (Metric metric : metrics) {
                    SimpleService ss = services.get(metric);
                    if (ss == null) {
                        // Guard: an unregistered metric previously caused an NPE on ss.getName().
                        LOGGER.warn("No service registered for metric {}; skipping.", metric);
                        continue;
                    }
                    LOGGER.info("Service {} execution start.", ss.getName());
                    ss.execute(filtered);
                    LOGGER.info("Service {} execution finished.", ss.getName());
                }
            } finally {
                // Previously skipped when a service threw; also was logged at trace
                // while every surrounding step logs at info.
                LOGGER.info("Uncaching data...");
                filtered.unpersist();
            }
            LOGGER.info(">>> Execute service finished. metrics={}, startDate={}, endDate={}.",
                    metrics, startDate, endDate);
        } finally {
            // Ensure the Spark session is stopped even when the job fails.
            sparkSession.stop();
        }

        // TODO(review): update Mongo appType / aggregate statistics data
        // (translated from the original Chinese note).
    }
}
