package cn._51doit.flink.day05;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.List;

/**
 * A closer look at how Flink implements state under the hood,
 * using Flink's state programming API.
 *
 * Flink distinguishes two kinds of state:
 *   KeyedState (used on a stream after keyBy, i.e. a KeyedStream)
 *       - ValueState roughly corresponds to Map&lt;KEY, VALUE&gt;
 *       - MapState   roughly corresponds to Map&lt;KEY, Map&lt;k, v&gt;&gt;
 *       - ListState  roughly corresponds to Map&lt;KEY, List&lt;E&gt;&gt;
 *   OperatorState (used on a non-keyed, plain stream)
 */
public class ListStateDemo {

    /** Maximum number of recent events retained per user key. */
    private static final int MAX_EVENTS_PER_KEY = 10;

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Enable checkpointing every 10 seconds so keyed state survives failures.
        env.enableCheckpointing(10000);

        DataStreamSource<String> lines = env.socketTextStream("localhost", 8888);

        // Keep the most recent behavior events of each user, e.g. input lines:
        //   u001,view
        //   u001,pay
        //   u002,view
        //   u003,view
        //   u001,focus
        //   u001,pay

        // Parse "userId,event" into (userId, event) tuples.
        SingleOutputStreamOperator<Tuple2<String, String>> tpStream = lines.map(new MapFunction<String, Tuple2<String, String>>() {

            @Override
            public Tuple2<String, String> map(String s) throws Exception {
                String[] fields = s.split(",");
                return Tuple2.of(fields[0], fields[1]);
            }
        });

        // Key by user id so each user gets its own ListState instance.
        KeyedStream<Tuple2<String, String>, String> keyedStream = tpStream.keyBy(t -> t.f0);

        SingleOutputStreamOperator<List<String>> res = keyedStream.map(new RichMapFunction<Tuple2<String, String>, List<String>>() {

            // Per-key list of recent events, managed by Flink's state backend.
            private transient ListState<String> listState;

            @Override
            public void open(Configuration parameters) throws Exception {
                ListStateDescriptor<String> stateDescriptor = new ListStateDescriptor<>("events-state", String.class);
                listState = getRuntimeContext().getListState(stateDescriptor);
            }

            @Override
            public List<String> map(Tuple2<String, String> tp) throws Exception {
                String event = tp.f1;
                listState.add(event);
                // ListState#get() returns an Iterable, NOT an ArrayList — casting it
                // fails on backends such as RocksDB. Copy into a fresh list instead.
                List<String> events = new ArrayList<>();
                for (String e : listState.get()) {
                    events.add(e);
                }
                // Cap the retained history: drop the oldest event and write the
                // trimmed list back so the state itself stays bounded (mutating
                // the local copy alone would not update the state reliably).
                if (events.size() > MAX_EVENTS_PER_KEY) {
                    events.remove(0);
                    listState.update(events);
                }
                return events;
            }
        });

        res.addSink(new SinkFunction<List<String>>() {
            @Override
            public void invoke(List<String> value, Context context) throws Exception {
                for (String e : value) {
                    System.out.println(e);
                }
            }
        });

        env.execute();
    }

}
