package com.bw;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * Flink job: consume raw app logs from Kafka ("topic_log"), repair the
 * {@code common.is_new} flag per device (mid) using keyed state, split the
 * stream into start/err/display/action side outputs, and write the start
 * branch back to Kafka.
 */
public class FlinkWeek1 {
    public static void main(String[] args) throws Exception {
        // 1. Create the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Read from Kafka. (Flink 1.13.x style addSource; 1.17+ would use fromSource.)
        DataStreamSource<String> dataStream =
                env.addSource(MyKafkaUtil.getKafkaConsumer("topic_log", "topic_log1"));

        // 3. Key by device id (common.mid) and repair is_new: a device should only
        //    report is_new="1" on its first active date.
        SingleOutputStreamOperator<JSONObject> processStream = dataStream
                .keyBy(new KeySelector<String, String>() {
                    @Override
                    public String getKey(String s) throws Exception {
                        return JSON.parseObject(s).getJSONObject("common").getString("mid");
                    }
                })
                .process(new KeyedProcessFunction<String, String, JSONObject>() {
                    // Per-mid first active date (yyyy-MM-dd); null until the first event.
                    private ValueState<String> valueState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        valueState = getRuntimeContext().getState(
                                new ValueStateDescriptor<String>("value_state", String.class));
                    }

                    @Override
                    public void processElement(String s,
                                               KeyedProcessFunction<String, String, JSONObject>.Context context,
                                               Collector<JSONObject> collector) throws Exception {
                        JSONObject jsonObject = JSON.parseObject(s);
                        JSONObject common = jsonObject.getJSONObject("common");
                        Long ts = jsonObject.getLong("ts");
                        String curDate = DateFormatUtil.tsToDate(ts);
                        String isNew = common.getString("is_new");
                        String firstDate = valueState.value();
                        if ("1".equals(isNew)) {
                            if (firstDate == null) {
                                // First event ever for this mid: remember its first active date,
                                // so repeat events on the SAME day still count as new.
                                valueState.update(curDate);
                            } else if (!curDate.equals(firstDate)) {
                                // Claims to be new but was first seen on an earlier date: fix the flag.
                                common.put("is_new", "0");
                            }
                            // else: same-day repeat — keep is_new = "1".
                        }
                        collector.collect(jsonObject);
                    }
                });

        // 4. Side-output tags for splitting the log stream by record type.
        OutputTag<String> startTag = new OutputTag<String>("start", TypeInformation.of(String.class));
        OutputTag<String> errorTag = new OutputTag<String>("err", TypeInformation.of(String.class));
        OutputTag<String> displayTag = new OutputTag<String>("display", TypeInformation.of(String.class));
        OutputTag<String> actionTag = new OutputTag<String>("action", TypeInformation.of(String.class));

        // 5. Split: err > start > displays > actions; everything else stays on the main output.
        // NOTE(review): "displays"/"actions" may be JSON arrays in the source log format —
        // if so, getJSONObject will not match and those branches never fire; confirm the
        // schema and switch to getJSONArray if needed.
        SingleOutputStreamOperator<JSONObject> splitStream = processStream.process(new ProcessFunction<JSONObject, JSONObject>() {
            @Override
            public void processElement(JSONObject jsonObject,
                                       ProcessFunction<JSONObject, JSONObject>.Context context,
                                       Collector<JSONObject> collector) throws Exception {
                JSONObject err = jsonObject.getJSONObject("err");
                JSONObject start = jsonObject.getJSONObject("start");
                JSONObject displays = jsonObject.getJSONObject("displays");
                JSONObject actions = jsonObject.getJSONObject("actions");
                if (err != null) {
                    context.output(errorTag, jsonObject.toJSONString());
                } else if (start != null) {
                    context.output(startTag, jsonObject.toJSONString());
                } else if (displays != null) {
                    context.output(displayTag, jsonObject.toJSONString());
                } else if (actions != null) {
                    // BUGFIX: was routed to displayTag, leaving actionStream permanently empty.
                    context.output(actionTag, jsonObject.toJSONString());
                } else {
                    // Plain page log: main output.
                    collector.collect(jsonObject);
                }
            }
        });

        DataStream<String> startStream = splitStream.getSideOutput(startTag);
        DataStream<String> errorStream = splitStream.getSideOutput(errorTag);
        DataStream<String> displayStream = splitStream.getSideOutput(displayTag);
        DataStream<String> actionStream = splitStream.getSideOutput(actionTag);

        // 6. Sink back to Kafka.
        // TODO(review): err/display/action streams are extracted but never sinked —
        // wire them to their own topics when the downstream topics are decided.
        startStream.addSink(MyKafkaUtil.getKafkaProducer("start_topic"));

        env.execute();
    }
}
