package app;


import bean.PageActionLog;
import bean.PageDisplayLog;
import bean.PageLog;
import bean.StartLog;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializeConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.HasOffsetRanges;
import org.apache.spark.streaming.kafka010.OffsetRange;
import util.MyKafkaUtils;
import util.MyOffsetsUtils;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Consumes raw log data from the ODS Kafka topic and splits it into DWD topics.
 *
 * Pipeline:
 *  1. Set up the real-time processing environment (StreamingContext).
 *  2. Consume data from Kafka (resuming from externally stored offsets when present).
 *  3. Process the data:
 *     3.1 Convert the structure (raw string -> JSONObject; dedicated Beans per log type).
 *     3.2 Split the stream by log type.
 *  4. Write each split out to its DWD-layer topic.
 *
 * Split rules:
 *  - Error data: no splitting — any record containing an "err" field is forwarded
 *    whole to the error topic.
 *  - Page data: split into page view, display (exposure) and action (event) records,
 *    each sent to its own topic.
 *  - Start (app launch) data: sent to the start topic.
 */
public class OdsBaseLogApp {
    final static String DWD_PAGE_LOG_TOPIC = "DWD_PAGE_LOG_TOPIC_1018";  // page views
    final static String DWD_PAGE_DISPLAY_TOPIC = "DWD_PAGE_DISPLAY_TOPIC_1018"; // page displays (exposure)
    final static String DWD_PAGE_ACTION_TOPIC  = "DWD_PAGE_ACTION_TOPIC_1018"; // page actions (events)
    final static String DWD_START_LOG_TOPIC = "DWD_START_LOG_TOPIC_1018"; // start (app launch) data
    final static String DWD_ERROR_LOG_TOPIC = "DWD_ERROR_LOG_TOPIC_1018" ;// error data

    // fastjson SerializeConfig(fieldBased = true) is safe to share and relatively
    // expensive to build, so reuse one instance instead of allocating a new one
    // for every record sent (initialized once per JVM, driver and executors alike).
    private static final SerializeConfig SERIALIZE_CONFIG = new SerializeConfig(true);

    public static void main(String[] args) throws InterruptedException {
        // 1. Streaming environment: local 4-thread master, 5-second micro-batches.
        SparkConf conf = new SparkConf().setMaster("local[4]").setAppName("ods_base_log_app");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));

        // 2. Consume from Kafka, resuming from externally stored offsets when available.
        final String topicName = "ODS_BASE_LOG_1018";  // topic name from the generator config
        final String groupId = "ODS_BASE_LOG_GROUP_1018";
        Map<TopicPartition, Long> offsets = MyOffsetsUtils.readOffset(topicName, groupId);
        JavaInputDStream<ConsumerRecord<Object, Object>> kafkaDStream;
        if (offsets != null && !offsets.isEmpty()) {
            kafkaDStream = MyKafkaUtils.getKafkaDStream(jssc, topicName, groupId, offsets);
        } else {
            kafkaDStream = MyKafkaUtils.getKafkaDStream(jssc, topicName, groupId);
        }

        // Extract the offset ranges of each batch without touching the data.
        // The transform function runs on the driver at job-generation time, so
        // `offsetRangeBuffer` is a driver-side buffer that the driver-side part of
        // foreachRDD drains below after the batch has been processed.
        final List<OffsetRange> offsetRangeBuffer = new ArrayList<>();
        JavaDStream<ConsumerRecord<Object, Object>> offsetRangesDStream = kafkaDStream.transform(
                (Function<JavaRDD<ConsumerRecord<Object, Object>>, JavaRDD<ConsumerRecord<Object, Object>>>) rdd -> {
                    OffsetRange[] offsetRanges = ((HasOffsetRanges) rdd.rdd()).offsetRanges();
                    for (OffsetRange offsetRange : offsetRanges) {
                        offsetRangeBuffer.add(offsetRange);
                    }
                    return rdd;
                });

        // 3.1 Structure conversion: raw record value -> JSONObject.
        JavaDStream<JSONObject> jsonObjDStream = offsetRangesDStream.map(
                (Function<ConsumerRecord<Object, Object>, JSONObject>) record ->
                        JSON.parseObject(String.valueOf(record.value())));

        // 3.2 Split the stream and 4. write each part to its DWD topic.
        jsonObjDStream.foreachRDD((VoidFunction<JavaRDD<JSONObject>>) rdd -> {
            rdd.foreachPartition((VoidFunction<Iterator<JSONObject>>) jsonObjectIterator -> {
                // Executor side: runs once per partition per batch.
                while (jsonObjectIterator.hasNext()) {
                    splitLog(jsonObjectIterator.next());
                }
                // Flush the producer so every record of this partition is durable
                // before the offsets are committed below (at-least-once semantics).
                MyKafkaUtils.flush();
            });
            // Driver side: persist offsets only after the batch has been processed.
            MyOffsetsUtils.saveOffset(topicName, groupId, offsetRangeBuffer);
            offsetRangeBuffer.clear();
        });

        jssc.start();
        jssc.awaitTermination();
    }

    /**
     * Splits one raw log record into the DWD topics according to the split rules
     * documented on the class. Runs on the executors.
     *
     * Log layout (as read by this method):
     *   page log : common fields + page data + displays + actions + optional err
     *   start log: common fields + start data + optional err
     */
    private static void splitLog(JSONObject jsonObject) {
        // Error data: forwarded whole, no splitting.
        JSONObject errObj = jsonObject.getJSONObject("err");
        if (errObj != null) {
            MyKafkaUtils.send(DWD_ERROR_LOG_TOPIC, jsonObject.toJSONString());
            return;
        }

        // Common fields shared by page and start logs.
        JSONObject commonObj = jsonObject.getJSONObject("common");
        if (commonObj == null) {
            // Malformed record: without the common block none of the Beans can be
            // populated; skip it rather than NPE and fail the whole batch.
            return;
        }
        String ar = commonObj.getString("ar");
        String uid = commonObj.getString("uid");
        String os = commonObj.getString("os");
        String ch = commonObj.getString("ch");
        String isNew = commonObj.getString("is_new");
        String md = commonObj.getString("md");
        String mid = commonObj.getString("mid");
        String vc = commonObj.getString("vc");
        String ba = commonObj.getString("ba");
        // Record-level event timestamp.
        Long ts = jsonObject.getLong("ts");

        // Page data: split into page view, displays and actions.
        JSONObject pageObj = jsonObject.getJSONObject("page");
        if (pageObj != null) {
            String pageId = pageObj.getString("page_id");
            String pageItem = pageObj.getString("item");
            String pageItemType = pageObj.getString("item_type");
            Long duringTime = pageObj.getLong("during_time");
            String lastPageId = pageObj.getString("last_page_id");
            String sourceType = pageObj.getString("source_type");

            PageLog pageLog = new PageLog(mid, uid, ar, ch, isNew, md, os, vc, ba,
                    pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType, ts);
            MyKafkaUtils.send(DWD_PAGE_LOG_TOPIC, JSON.toJSONString(pageLog, SERIALIZE_CONFIG));

            // Display (exposure) data: one PageDisplayLog per array element,
            // carrying the enclosing page's context fields.
            JSONArray displaysJsonArr = jsonObject.getJSONArray("displays");
            if (displaysJsonArr != null && displaysJsonArr.size() > 0) {
                for (int i = 0; i < displaysJsonArr.size(); i++) {
                    JSONObject displayObj = displaysJsonArr.getJSONObject(i);
                    String displayType = displayObj.getString("display_type");
                    String displayItem = displayObj.getString("item");
                    String displayItemType = displayObj.getString("item_type");
                    String posId = displayObj.getString("pos_id");
                    String order = displayObj.getString("order");
                    PageDisplayLog pageDisplayLog = new PageDisplayLog(mid, uid, ar, ch, isNew, md, os, vc, ba,
                            pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType,
                            displayType, displayItem, displayItemType, order, posId, ts);
                    MyKafkaUtils.send(DWD_PAGE_DISPLAY_TOPIC, JSON.toJSONString(pageDisplayLog, SERIALIZE_CONFIG));
                }
            }

            // Action (event) data: one PageActionLog per array element, with its
            // own per-action timestamp in addition to the record timestamp.
            JSONArray actionJsonArr = jsonObject.getJSONArray("actions");
            if (actionJsonArr != null && actionJsonArr.size() > 0) {
                for (int i = 0; i < actionJsonArr.size(); i++) {
                    JSONObject actionObj = actionJsonArr.getJSONObject(i);
                    String actionId = actionObj.getString("action_id");
                    String actionItem = actionObj.getString("item");
                    String actionItemType = actionObj.getString("item_type");
                    Long actionTs = actionObj.getLong("ts");
                    PageActionLog pageActionLog = new PageActionLog(mid, uid, ar, ch, isNew, md, os, vc, ba,
                            pageId, lastPageId, pageItem, pageItemType, duringTime, sourceType,
                            actionId, actionItem, actionItemType, actionTs, ts);
                    MyKafkaUtils.send(DWD_PAGE_ACTION_TOPIC, JSON.toJSONString(pageActionLog, SERIALIZE_CONFIG));
                }
            }
        }

        // Start (app launch) data.
        JSONObject startJsonObj = jsonObject.getJSONObject("start");
        if (startJsonObj != null) {
            String entry = startJsonObj.getString("entry");
            Long loadingTime = startJsonObj.getLong("loading_time");
            String openAdId = startJsonObj.getString("open_ad_id");
            Long openAdMs = startJsonObj.getLong("open_ad_ms");
            Long openAdSkipMs = startJsonObj.getLong("open_ad_skip_ms");
            StartLog startLog = new StartLog(mid, uid, ar, ch, isNew, md, os, vc, ba,
                    entry, openAdId, loadingTime, openAdMs, openAdSkipMs, ts);
            MyKafkaUtils.send(DWD_START_LOG_TOPIC, JSON.toJSONString(startLog, SERIALIZE_CONFIG));
        }
    }
}
