package flink;


import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;

import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Properties;

/**
 * Flink streaming job that computes Unique Visitors (UV).
 *
 * <p>Pipeline: Kafka topic {@code uv} → parse JSON → key by device id
 * ({@code common.mid}) → keep only the first session-start event of each
 * device per calendar day (keyed state with 24h TTL) → print and write the
 * surviving events back to Kafka topic {@code uvkafka}.
 */
public class UniqueVisitApp {
    public static void main(String[] args) throws Exception {
        // Set up the streaming execution environment; single parallelism for this job.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Kafka source: topic "uv", consumer group "kafkauv".
        Properties properties = new Properties();
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "kafkauv");
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.1.10:9092");
        DataStreamSource<String> uvDS = env.addSource(
                new FlinkKafkaConsumer<String>("uv", new SimpleStringSchema(), properties));

        // Parse each raw record into a JSONObject.
        SingleOutputStreamOperator<JSONObject> uvjson = uvDS.map(JSONObject::parseObject);

        // Key by device id (common.mid) so per-device state can drive the de-duplication.
        KeyedStream<JSONObject, String> uvKeyByMid =
                uvjson.keyBy(key -> key.getJSONObject("common").getString("mid"));

        // Keep only the first visit of each device per calendar day.
        SingleOutputStreamOperator<JSONObject> filter = uvKeyByMid.filter(new RichFilterFunction<JSONObject>() {
            // Last date (yyyy-MM-dd) on which this key passed the filter.
            private ValueState<String> dateState;
            // Immutable, thread-safe formatter (replaces the legacy, mutable
            // SimpleDateFormat). transient because DateTimeFormatter is not
            // Serializable and the field is rebuilt in open() on each task.
            private transient DateTimeFormatter dateFormatter;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Expire per-key state 24h after creation/last write so stale
                // device keys do not accumulate in state forever.
                StateTtlConfig stateTtlConfig = new StateTtlConfig
                        .Builder(Time.hours(24))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();
                ValueStateDescriptor<String> descriptor =
                        new ValueStateDescriptor<>("value-state", String.class);
                descriptor.enableTimeToLive(stateTtlConfig);
                dateState = getRuntimeContext().getState(descriptor);
                // Same "yyyy-MM-dd" rendering in the JVM default zone as the
                // original SimpleDateFormat.
                dateFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd")
                        .withZone(ZoneId.systemDefault());
            }

            @Override
            public boolean filter(JSONObject jsonObject) throws Exception {
                // NOTE(review): last_page_id is read from the TOP-LEVEL object
                // here, while mid above is read from the nested "common" object.
                // In the common gmall-style log schema this field lives under
                // "page" — confirm against the actual event schema.
                String lastPageId = jsonObject.getString("last_page_id");

                // Only session-start events (no previous page) are UV candidates.
                if (lastPageId == null || lastPageId.isEmpty()) {
                    // Date of the last visit recorded for this device, or null.
                    String lastVisitDate = dateState.value();

                    // Calendar day of this event, derived from its epoch-millis "ts".
                    String curDate = dateFormatter.format(
                            Instant.ofEpochMilli(jsonObject.getLong("ts")));

                    if (curDate.equals(lastVisitDate)) {
                        // Already counted this device today — drop the event.
                        return false;
                    } else {
                        // First visit today: remember the date and let it through.
                        dateState.update(curDate);
                        return true;
                    }
                } else {
                    // Mid-session event — never a new unique visit.
                    return false;
                }
            }
        });

        // Debug output plus sink back to Kafka topic "uvkafka".
        filter.print();
        filter.map(JSONAware::toJSONString)
              .addSink(new FlinkKafkaProducer<String>("192.168.1.10:9092", "uvkafka", new SimpleStringSchema()));

        // Launch the job.
        env.execute("UniqueVisitApp");
    }
}
