package app;

import bean.DauInfo;
import bean.PageLog;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.HasOffsetRanges;
import org.apache.spark.streaming.kafka010.OffsetRange;
import redis.clients.jedis.Jedis;
import scala.Tuple2;
import util.MyEsUtils;
import util.MyKafkaUtils;
import util.MyOffsetsUtils;
import util.MyRedisUtils;

import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.Period;
import java.util.*;

/**
 * DAU (daily active users) wide table.
 *
 * 1. Set up the real-time environment
 * 2. Read offsets from Redis
 * 3. Consume data from Kafka
 * 4. Capture the batch-end offsets
 * 5. Process the data
 *     5.1 Convert the data structure
 *     5.2 Deduplicate
 *     5.3 Join dimensions
 * 6. Write to ES
 * 7. Commit offsets
 */
public class DwdDauApp {
    final static String topicName = "DWD_PAGE_LOG_TOPIC_1018";
    final static String groupId = "DWD_DAU_GROUP";
    public static void main(String[] args) throws InterruptedException {
        SparkConf conf = new SparkConf().setMaster("local[4]").setAppName("ods_base_db_app");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));
        //2. 从redis中读取偏移量
        Map<TopicPartition, Long> offsets = MyOffsetsUtils.readOffset(topicName, groupId);
        //3. 从Kafka中消费数据
        JavaInputDStream<ConsumerRecord<Object, Object>> kafkaDStream = null;
        if(offsets != null && !offsets.isEmpty()) {
            kafkaDStream = MyKafkaUtils.getKafkaDStream(jssc,topicName,groupId, offsets);
        } else {
            kafkaDStream = MyKafkaUtils.getKafkaDStream(jssc,topicName,groupId);
        }
        //4. 提取偏移量结束点
        final List<OffsetRange> list = new ArrayList<OffsetRange>();
        JavaDStream<ConsumerRecord<Object, Object>> offsetRangesDStream = kafkaDStream.transform(new Function<JavaRDD<ConsumerRecord<Object, Object>>, JavaRDD<ConsumerRecord<Object, Object>>>() {
            public JavaRDD<ConsumerRecord<Object, Object>> call(JavaRDD<ConsumerRecord<Object, Object>> rdd) throws Exception {
                OffsetRange[] offsetRanges = ((HasOffsetRanges) rdd.rdd()).offsetRanges();
                for(int i = 0; i < offsetRanges.length; i++) {
                    list.add(offsetRanges[i]);
                }
                return rdd;
            }
        });

        //5. 处理数据
        // 5.1 转换结构
        JavaDStream<PageLog> pageLogDStream = offsetRangesDStream.map(new Function<ConsumerRecord<Object, Object>, PageLog>() {
            @Override
            public PageLog call(ConsumerRecord<Object, Object> v1) throws Exception {
                String value = String.valueOf(v1.value());
                PageLog pageLog = JSON.parseObject(value, PageLog.class);
                return pageLog;
            }
        });
//        pageLogDStream.cache();
//        pageLogDStream.foreachRDD(new VoidFunction<JavaRDD<PageLog>>() {
//            @Override
//            public void call(JavaRDD<PageLog> pageLogJavaRDD) throws Exception {
//                System.out.println("自我审查前" + pageLogJavaRDD.count());
//            }
//        });
        //5.2 去重
        // 自我审查: 将页面访问数据中last_page_id不为空的数据过滤掉
        JavaDStream<PageLog> filterDStream = pageLogDStream.filter(new Function<PageLog, Boolean>() {
            @Override
            public Boolean call(PageLog v1) throws Exception {
                //last_page_id 的返回true 不为空的被过滤掉
                return v1.getLast_page_id() == null;
            }
        });

        // 第三方审查:  通过redis将当日活跃的mid维护起来,自我审查后的每条数据需要到redis中进行比对去重
        // redis中如何维护日活状态
        // 类型:    set
        // key :    DAU:DATE
        // value :  mid的集合
        // 写入API: sadd
        // 读取API: smembers
        // 过期:  24小时
        //filterDStream.filter()  // 每条数据执行一次. redis的连接太频繁.
        JavaDStream<PageLog> redisFilterDStream = filterDStream.mapPartitions(new FlatMapFunction<Iterator<PageLog>, PageLog>() {
            @Override
            public Iterator<PageLog> call(Iterator<PageLog> pageLogIterator) throws Exception {
                List<PageLog> pageLogs = new ArrayList<>();
                Jedis jedis = MyRedisUtils.getJedisFromPool();
                SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                while (pageLogIterator.hasNext()) {
                    // 提取每条数据中的mid (我们日活的统计基于mid， 也可以基于uid)
                    String mid = pageLogIterator.next().getMid();
                    //获取日期 , 因为我们要测试不同天的数据，所以不能直接获取系统时间.
                    Long ts = pageLogIterator.next().getTs();
                    Date date = new Date(ts);
                    String dateStr = sdf.format(date);
                    String redisDauKey = "DAU:"+dateStr;
                    //redis的判断是否包含操作
                    // saad 判断包含和写入实现了原子操作.  返回1 写入成功 返回0写入失败
                    Long isNew = jedis.sadd(redisDauKey, mid);
                    if(isNew == 1L) {
                        pageLogs.add(pageLogIterator.next());
                    }

                }
                jedis.close();
                return pageLogs.iterator();
            }
        });

        //5.3 维度关联
        JavaDStream<DauInfo> dauInfoDStream = redisFilterDStream.mapPartitions(new FlatMapFunction<Iterator<PageLog>, DauInfo>() {
            @Override
            public Iterator<DauInfo> call(Iterator<PageLog> pageLogIterator) throws Exception {
                List<DauInfo> dauInfos = new ArrayList<>();
                Jedis jedis = MyRedisUtils.getJedisFromPool();
                SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                while(pageLogIterator.hasNext()) {
                    DauInfo dauInfo = new DauInfo();
                    PageLog pageLog = pageLogIterator.next();
                    //1. 将pagelog中以后的字段拷贝到DauInfo中
                    BeanUtils.copyProperties(dauInfo,pageLog);
                    //2. 补充维度
                    //2.1  用户信息维度
                    String uid = pageLog.getPage_id();
                    String redisUidkey = "DIM:USER_INFO:" + uid;
                    String userInfoJson = jedis.get(redisUidkey);
                    JSONObject userInfoJsonObj = JSON.parseObject(userInfoJson);
                    //提取性别
                    String gender = userInfoJsonObj.getString("gender");
                    //提取生日
                    String birthday = userInfoJsonObj.getString("birthday"); // 1976-03-22
                    //换算年龄  根据当前时间和生日换算出来
                    LocalDate birthdayLd = LocalDate.parse(birthday);
                    LocalDate nowLd  = LocalDate.now();
                    Period period = Period.between(birthdayLd, nowLd);
                    int age = period.getYears();
                    //补充到对象中
                    dauInfo.setUser_gender(gender);
                    dauInfo.setUser_age(String.valueOf(age));
                    //2.2  地区信息维度
                    // redis中:
                    // 现在: DIM:BASE_PROVINCE:1
                    // 之前: DIM:BASE_PROVINCE:110000
                    String provinceID = dauInfo.getProvince_id();
                    String redisProvinceKey = "DIM:BASE_PROVINCE:"+provinceID;
                    String provinceJson = jedis.get(redisProvinceKey);
                    JSONObject provinceJsonObj = JSON.parseObject(provinceJson);
                    String provinceName = provinceJsonObj.getString("name");
                    String provinceIsoCode = provinceJsonObj.getString("iso_code");
                    String province3166 = provinceJsonObj.getString("iso_3166_2");
                    String provinceAreaCode = provinceJsonObj.getString("area_code");
                    //补充到对象中
                    dauInfo.setProvince_name(provinceName);
                    dauInfo.setProvince_iso_code(provinceIsoCode);
                    dauInfo.setProvince_area_code(provinceAreaCode);
                    //2.3  日期字段处理
                    Date date = new Date(pageLog.getTs());
                    String dtHr = sdf.format(date);
                    String[] dtHrArr = dtHr.split(" ");
                    String dt = dtHrArr[0];
                    String hr = (dtHrArr[1].split(":"))[0];
                    //补充到对象中
                    dauInfo.setDt(dt);
                    dauInfo.setHr(hr);
                    dauInfos.add(dauInfo);
                }
                jedis.close();
                return dauInfos.iterator();
            }
        });

        //写入到OLAP中
        //按照天分割索引，通过索引模板控制mapping、settings、aliases等.
        //准备ES工具类
        dauInfoDStream.foreachRDD(new VoidFunction<JavaRDD<DauInfo>>() {
            @Override
            public void call(JavaRDD<DauInfo> dauInfoJavaRDD) throws Exception {
                dauInfoJavaRDD.foreachPartition(new VoidFunction<Iterator<DauInfo>>() {
                    @Override
                    public void call(Iterator<DauInfo> dauInfoIterator) throws Exception {
                        List<Tuple2<String, DauInfo>> docs = new ArrayList<>();

                        while(dauInfoIterator.hasNext()) {
                            DauInfo dauInfo = dauInfoIterator.next();
                            Tuple2<String, DauInfo> tuple2 = new Tuple2<>(dauInfo.getMid(), dauInfo);
                            docs.add(tuple2);
                        }
                        if(docs.size() > 0) {
                            // 从第一条数据中获取日期
                            Tuple2<String, DauInfo> head = docs.get(0);
                            Long ts = head._2.getTs();
                            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
                            String dateStr = sdf.format(new Date(ts));
                            String indexName = "gmall_dau_info_1018_" + dateStr;
                            List<Tuple2<String, Object>> docs_list = new ArrayList<>();
                            for(Tuple2<String, DauInfo> val : docs) {
                                DauInfo dauInfo = val._2;
                                String mid = val._1;
                                Tuple2<String, Object> temp = new Tuple2<>(mid, dauInfo.toObject());
                                docs_list.add(temp);
                            }
                            //写入到ES中
                            MyEsUtils.bulkSave(indexName, docs_list);

                        }
                    }
                });
                MyOffsetsUtils.saveOffset(topicName, groupId , list);
                list.clear();
            }
        });
        jssc.start();
        jssc.awaitTermination();
    }
}
