package com.bw.gmall.realtime.app.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.gmall.realtime.utils.DateFormatUtil;
import com.bw.gmall.realtime.utils.MyKafkaUtil;
import com.bw.gmall.realtime.utils.MyKafkaUtil2;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import java.util.Date;
import java.util.Random;

/*
1. Read the log data from the ODS topic `topic_log`.
2. Clean/filter `topic_log`.
3. Handle new vs. returning users:
               is_new = 1:
                         state == null  -> convert this record's ts to yyyy-MM-dd and store it in the state
                         state != null  -> compare the record's ts date with the state date; update the record if they differ
               is_new = 0:
                         state == null  -> store (ts - 1 day) into the state
                         state != null  -> nothing to do
4. Split the main stream into 5 side streams and write each to its own topic.
Job skeleton:
1. Create the streaming environment.
2. Set the parallelism.
3. Run the jar to send data to the `topic_log` topic.
4. Read the main stream from Kafka.
5. Clean the data and convert its structure.
6. Write dirty records to a dedicated Kafka topic.
 */
/**
 * DWD traffic job: reads raw page logs from the ODS Kafka topic, keeps only
 * events whose {@code page.page_action} marks a shop visit (codes a/b/c/e/f),
 * de-duplicates each device ({@code common.mid}) to at most one morning visit
 * and one afternoon visit within a one-day TTL window, and writes the
 * surviving records to the {@code dwd_traffic} Kafka topic.
 */
public class BaseLogAppShopDwd {
    public static void main(String[] args) throws Exception {

        // TODO 1. Initialise the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        String topic = "ods_traffic";
        String groupId = "base_log_consumer";

        DataStreamSource<String> source = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        /* Keep events that count as a shop visit within the stat window:
           watching the shop's own live stream, watching a self-made full-screen
           short video for >= 3s, browsing shop-made image/text content for
           >= 3s, browsing full-screen micro-detail, visiting an item detail
           page or other shop pages — mapped to page_action codes a/b/c/e/f. */
        SingleOutputStreamOperator<String> page = source.filter(new FilterFunction<String>() {
            @Override
            public boolean filter(String value) throws Exception {
                JSONObject jsonObject = JSON.parseObject(value);
                JSONObject pageObj = jsonObject.getJSONObject("page");
                if (pageObj == null) {
                    return false;
                }
                String action = pageObj.getString("page_action");
                // Constant-first equals guards against a missing page_action
                // (the original called action.equals(...) and could NPE).
                return "a".equals(action)
                        || "b".equals(action)
                        || "c".equals(action)
                        || "e".equals(action)
                        || "f".equals(action);
            }
        });

        SingleOutputStreamOperator<JSONObject> map = page.map(JSONObject::parseObject);

        // Key by device id (mid) so every device carries its own visit state.
        KeyedStream<JSONObject, String> keyedStream =
                map.keyBy(jsonObj -> jsonObj.getJSONObject("common").getString("mid"));

        SingleOutputStreamOperator<JSONObject> deduped =
                keyedStream.process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {
            // Flags recording whether this key already emitted a
            // morning (sw) / afternoon (xw) visit within the TTL window.
            ValueState<String> sw;
            ValueState<String> xw;

            @Override
            public void open(Configuration parameters) throws Exception {
                // One-day TTL, refreshed whenever the state is created or
                // written, so each flag expires a day after its last update.
                StateTtlConfig ttl = StateTtlConfig
                        .newBuilder(Time.days(1L))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();

                // FIX: the two states MUST use distinct descriptor names.
                // The original registered both from a single descriptor
                // ("last_visit_dt"), so sw and xw aliased the same state and
                // the morning/afternoon split never worked.
                ValueStateDescriptor<String> morningDesc =
                        new ValueStateDescriptor<>("morning_visit_flag", String.class);
                morningDesc.enableTimeToLive(ttl);
                ValueStateDescriptor<String> afternoonDesc =
                        new ValueStateDescriptor<>("afternoon_visit_flag", String.class);
                afternoonDesc.enableTimeToLive(ttl);

                this.sw = getRuntimeContext().getState(morningDesc);
                this.xw = getRuntimeContext().getState(afternoonDesc);
            }

            @Override
            public void processElement(JSONObject value, Context ctx, Collector<JSONObject> out) throws Exception {
                Long ts = value.getLong("ts");
                // Hour-of-day parsed from "yyyy-MM-dd HH:mm:ss" (chars 11-12);
                // Integer.parseInt replaces the deprecated new Integer(...).
                int hour = Integer.parseInt(DateFormatUtil.toYmdHms(ts).substring(11, 13));

                if (hour <= 12) {
                    // First morning visit for this key: emit and remember.
                    if (sw.value() == null) {
                        sw.update("1");
                        out.collect(value);
                    }
                } else {
                    // First afternoon visit for this key: emit and remember.
                    if (xw.value() == null) {
                        xw.update("1");
                        out.collect(value);
                    }
                }
            }
        });

        // FIX: sink the DE-DUPLICATED stream. The original discarded the
        // process() result and printed/sank the raw filtered `page` stream,
        // so the dedup operator had no effect on the output topic.
        SingleOutputStreamOperator<String> dwd = deduped.map(JSONObject::toJSONString);
        dwd.print("------------>");
        dwd.addSink(MyKafkaUtil.getFlinkKafkaProducer("dwd_traffic"));
        env.execute();
    }
}
