package com.om.googleAnalytics;

import Utils.HttpClientUtils;
import com.om.common.PropertiesGetter;
import com.om.googleAnalytics.model.Event;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataTypes;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.net.URL;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Properties;

import static org.apache.spark.sql.functions.*;


/**
 * Spark job that ingests a Google Analytics BigQuery daily JSON export into a
 * Hive table, then enriches one day's worth of rows with derived columns and
 * bulk-indexes them into Elasticsearch.
 *
 * <p>Usage: {@code BigQueryData <source-json-url> <event-date>}.
 */
public class BigQueryData {
    // Loaded from /GoogleAnalytics/conf.properties. NOTE(review): these fields
    // are referenced inside foreachPartition lambdas, so the static initializer
    // also runs on executors — the properties file must be on their classpath.
    private static final String hiveTable;       // target Hive table for raw events
    private static final String esIndex;         // target Elasticsearch index
    private static final String hiveWriteMode;   // Spark SaveMode name, e.g. "append"/"overwrite"

    static {
        Properties properties = new PropertiesGetter().getConfig("/GoogleAnalytics/conf.properties");
        hiveTable = properties.getProperty("hive.table");
        esIndex = properties.getProperty("es.index");
        hiveWriteMode = properties.getProperty("hive.write.mode");
    }

    /**
     * Entry point.
     *
     * @param args args[0] = URL of the BigQuery JSON export to ingest,
     *             args[1] = event date (value of the {@code event_date} column) to analyse
     */
    public static void main(String[] args) {
        // Fail fast with a usable message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            throw new IllegalArgumentException("usage: BigQueryData <source-json-url> <event-date>");
        }

        SparkSession spark = SparkSession.builder().appName("BigQueryData").enableHiveSupport().getOrCreate();
        try {
            ObjectMapper objectMapper = new ObjectMapper();

            writeDataToHive(spark, objectMapper, args[0]);

            registerUdf(spark);
            analysisData2Es(spark, args[1]);
        } finally {
            // Release cluster resources even when a stage fails.
            spark.stop();
        }
    }

    /**
     * Downloads the JSON export, flattens well-known {@code event_params}
     * entries onto top-level {@link Event} fields, and writes the resulting
     * dataset to the configured Hive table.
     *
     * @param file URL of the JSON document (an array of event objects)
     * @throws IllegalStateException if the export cannot be read or parsed.
     *         Previously the IOException was only printed, which let the job
     *         continue and index stale data from the existing Hive table.
     */
    private static void writeDataToHive(SparkSession spark, ObjectMapper objectMapper, String file) {
        ArrayList<Event> eventList = new ArrayList<>();

        try {
            JsonNode nodes = objectMapper.readTree(new URL(file));
            for (JsonNode node : nodes) {
                Event event = objectMapper.readValue(node.toString(), Event.class);

                // Promote selected key/value pairs from the event_params array
                // to dedicated columns so they can be queried directly.
                for (Event.EventParams param : event.getEvent_params()) {
                    switch (param.getKey()) {
                        case "page_title":
                            event.setPage_title(param.getValue().getString_value());
                            break;
                        case "page_location":
                            event.setPage_location(param.getValue().getString_value());
                            break;
                        case "page_referrer":
                            event.setPage_referrer(param.getValue().getString_value());
                            break;
                        case "link_url":
                            event.setLink_url(param.getValue().getString_value());
                            break;
                        default:
                            break;
                    }
                }

                eventList.add(event);
            }

            Dataset<Event> dataset = spark.createDataset(eventList, Encoders.bean(Event.class));

            dataset.write().mode(hiveWriteMode).saveAsTable(hiveTable);

        } catch (IOException e) {
            // Fail fast: the downstream ES stage must not run against missing/stale data.
            throw new IllegalStateException("Failed to ingest BigQuery export from " + file, e);
        }
    }

    /**
     * Reads one day of events back from Hive, derives created_at /
     * operating_system / is_new_user / md5_key columns via the registered
     * UDFs, and bulk-indexes each partition into Elasticsearch using the
     * MD5 key as the document id (making re-runs idempotent).
     *
     * @param eventDate value matched against the {@code event_date} column
     */
    private static void analysisData2Es(SparkSession spark, String eventDate) {
        String sqlStr = String.format("select * from %s where event_date = '%s'", hiveTable, eventDate);
        Dataset<Row> ds = spark.sql(sqlStr);
        Dataset<Event> as = ds.as(Encoders.bean(Event.class));

        Dataset<Row> dsTemp = as.withColumn("created_at", callUDF("timeFormatUDF", col("event_timestamp")))
                .withColumn("operating_system", callUDF("operatingSystemUDF", col("device.category"), col("device.mobile_model_name"), col("device.operating_system_version")))
                .withColumn("is_new_user", callUDF("isNewUserUDF", col("event_timestamp"), col("user_first_touch_timestamp")))
                .withColumn("md5_key", callUDF("keyUDF", col("event_timestamp"), col("event_name"), col("user_pseudo_id")));

        dsTemp.toJSON().foreachPartition(it -> {
            // try-with-resources: the previous code leaked the client when
            // bulk() threw, because close() was not in a finally block.
            try (RestHighLevelClient restHighLevelClient = HttpClientUtils.restClient()) {
                BulkRequest request = new BulkRequest();
                ObjectMapper objectMapper = new ObjectMapper();
                while (it.hasNext()) {
                    String next = it.next();
                    JsonNode jsonNode = objectMapper.readTree(next);
                    request.add(new IndexRequest(esIndex, "_doc", jsonNode.get("md5_key").getTextValue()).source(next, XContentType.JSON));
                }
                if (request.numberOfActions() > 0) {
                    restHighLevelClient.bulk(request, RequestOptions.DEFAULT);
                }
            }
        });
    }

    /**
     * Registers the string-typed UDFs used by {@link #analysisData2Es}.
     * All UDFs take GA export fields as strings and return strings.
     */
    private static void registerUdf(SparkSession spark) {
        // event_timestamp is in microseconds; /1000 yields epoch milliseconds.
        // Bug fix: the output pattern hard-codes a literal "+00:00" offset, so
        // the wall time must be computed in UTC — the old code used
        // ZoneId.systemDefault(), producing local times falsely labelled UTC.
        spark.sqlContext().udf().register("timeFormatUDF", (String s) -> {
            DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss+00:00");
            LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(s) / 1000), ZoneOffset.UTC);
            return dtf.format(localDateTime);
        }, DataTypes.StringType);

        // Heuristic OS classification from the GA device record; order matters.
        spark.sqlContext().udf().register("operatingSystemUDF", (String category, String mobileModelName, String operatingSystemVersion) -> {
            if (StringUtils.isBlank(operatingSystemVersion)) {
                return "Linux";
            } else if (!StringUtils.isBlank(mobileModelName) && (mobileModelName.equals("iPhone") || mobileModelName.equals("iPad"))) {
                return "iOS";
            } else if (!StringUtils.isBlank(category) && !StringUtils.isBlank(mobileModelName) && category.equals("desktop") && mobileModelName.equals("Safari")) {
                return "Macintosh";
            } else if (!StringUtils.isBlank(category) && category.equals("mobile")) {
                return "Android";
            } else {
                return "Windows";
            }
        }, DataTypes.StringType);

        // A user is "new" when this event's timestamp equals the first-touch
        // timestamp. StringUtils.equals is null-safe; the old s1.equals(s2)
        // threw NPE when event_timestamp was null in the export.
        spark.sqlContext().udf().register("isNewUserUDF", (String s1, String s2) -> {
            return StringUtils.equals(s1, s2) ? "yes" : "no";
        }, DataTypes.StringType);

        // Deterministic document id: md5(timestamp + event name + pseudo id),
        // so re-indexing the same day overwrites rather than duplicates.
        spark.sqlContext().udf().register("keyUDF", (String s1, String s2, String s3) -> {
            return DigestUtils.md5Hex(s1 + s2 + s3);
        }, DataTypes.StringType);
    }
}
