package com.atguigu.education.app.dwd.db;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.education.util.DateFormatUtil;
import com.atguigu.education.util.KafkaUtil;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.codehaus.jettison.json.JSONException;

import java.time.Duration;

/**
 * @title: DwdTradeUniqueBuyerDetail
 * @Author Mr.Liu
 * @Date: 2022/9/3 15:59
 * @Version 1.0
 */
public class DwdTradeUniqueBuyerDetail {

    /**
     * Flink job: reads pre-processed order detail records from the Kafka topic
     * {@code dwd_trade_order_pre_process}, keeps only each user's first order
     * record per day (keyed de-duplication on {@code user_id} via a
     * {@link ValueState} with a 1-day TTL), and writes the surviving records
     * back to Kafka as JSON strings.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        // TODO 1 Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // NOTE(review): tableEnv is never used in this job — confirm it can be removed.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 State backend / checkpointing (disabled for local development)
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */
        String group_id = "dwd_trade_unique_buyer_detail1";
        String topic_name = "dwd_trade_order_pre_process";
        DataStreamSource<String> sourceStream = env.addSource(KafkaUtil.getKafkaConsumer(topic_name, group_id));

        // Parse each record. BUGFIX: a malformed record used to throw inside
        // JSON.parseObject and kill the job before the null filter below could
        // ever run; now parse failures yield null so dirty data is dropped.
        SingleOutputStreamOperator<JSONObject> jsonStream = sourceStream.map(new MapFunction<String, JSONObject>() {
            @Override
            public JSONObject map(String s) throws Exception {
                try {
                    return JSON.parseObject(s);
                } catch (Exception e) {
                    // Dirty data: report and let the downstream null filter discard it.
                    System.err.println("Malformed JSON record dropped: " + s);
                    return null;
                }
            }
        });

//        jsonStream.print("pre>>>>>>>>>>>>");

        // Drop records that failed to parse.
        SingleOutputStreamOperator<JSONObject> filter = jsonStream.filter(new FilterFunction<JSONObject>() {
            @Override
            public boolean filter(JSONObject jsonObject) throws Exception {
                return jsonObject != null;
            }
        });

        // Key by user so the de-duplication state below is scoped per user.
        KeyedStream<JSONObject, String> keyedStream = filter.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject jsonObject) throws Exception {
                return jsonObject.getString("user_id");
            }
        });

        // Keep only the first order of each user per day.
        SingleOutputStreamOperator<JSONObject> filterStream = keyedStream.filter(new RichFilterFunction<JSONObject>() {
            // Last order date (as formatted by DateFormatUtil.toDate) seen for the current user key.
            private ValueState<String> orderDt;

            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<String> last_order = new ValueStateDescriptor<>("last_order", String.class);
                // Expire per-user state one day after creation/last write so it
                // does not grow without bound.
                last_order.enableTimeToLive(StateTtlConfig.newBuilder(Time.days(1))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite).build());
                orderDt = getRuntimeContext().getState(last_order);
            }

            @Override
            public boolean filter(JSONObject jsonObject) throws Exception {
                String date = DateFormatUtil.toDate(jsonObject.getLong("od_ts"));
                String lastDate = orderDt.value();
                if (lastDate == null || !date.equals(lastDate)) {
                    // BUGFIX: record the date so later orders from the same user on
                    // the same day are filtered out. Previously the state was never
                    // written, so value() stayed null and EVERY record passed.
                    orderDt.update(date);
                    return true;
                }
                return false;
            }
        });

        filterStream.print("filter----------");
        // NOTE(review): the sink topic here equals the consumer group id
        // ("dwd_trade_unique_buyer_detail1") — confirm this is the intended
        // output topic and not a copy-paste slip.
        filterStream.map(JSONAware::toJSONString)
                .addSink(KafkaUtil.getKafkaProducer(group_id));
        env.execute(group_id);
    }
}
