//package cn._51doit.day05.hbaseReduce;
//
//import org.apache.flink.api.common.functions.ReduceFunction;
//import org.apache.flink.api.common.typeinfo.Types;
//import org.apache.flink.api.java.tuple.Tuple2;
//import org.apache.flink.configuration.Configuration;
//import org.apache.flink.streaming.api.datastream.DataStreamSource;
//import org.apache.flink.streaming.api.datastream.KeyedStream;
//import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
//import org.apache.flink.streaming.api.datastream.WindowedStream;
//import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
//import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
//import org.apache.flink.streaming.api.functions.sink.SinkFunction;
//import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
//import org.apache.flink.streaming.api.windowing.time.Time;
//import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
//
//import org.apache.hadoop.hbase.client.Delete;
//import org.apache.hadoop.hbase.client.Put;
//import org.apache.hadoop.hbase.client.Table;
//
//
///**
// * @create: 2021-10-20 15:40
// * @author: 今晚打脑斧
// * @program: xdhg
// * @Description:
// *      Writes 30-second windowed word counts from a socket stream into HBase.
// *      NOTE(review): the original description said "sliding" window, but the
// *      code below actually uses a TUMBLING processing-time window.
// *
// *      NOTE(review): this entire class is commented out and depends on a
// *      project-local HbaseUtil helper that is not visible in this file —
// *      confirm that helper exists before reviving this code.
// **/
//public class HbaseReduceDemo {
//    public static void main(String[] args) throws Exception {
//
//
//        Configuration configuration = new Configuration();
//        configuration.setInteger("rest.port", 8081);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(configuration);
//        DataStreamSource<String> lines = env.socketTextStream("doit01", 8888);
//
//
//
//        // Input line format: "word,count", e.g. "flink,1"
//        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndCount = lines.map(line -> {
//            String[] fields = line.split(",");
//            String word = fields[0];
//            int count = Integer.parseInt(fields[1]);
//            return Tuple2.of(word, count);
//        }).returns(Types.TUPLE(Types.STRING, Types.INT));
//
//        // First key the stream by the word (tuple field f0).
//        KeyedStream<Tuple2<String, Integer>, String> keyedStream = wordAndCount.keyBy(t -> t.f0);
//
//        // Tumbling window on processing time, rolling every 30 seconds;
//        // i.e. the window fires once per 30s and emits one result per key.
////        WindowedStream<Tuple2<String, Integer>, String, TimeWindow> windowedStream = keyedStream.timeWindow(Time.seconds(30));
//        WindowedStream<Tuple2<String, Integer>, String, TimeWindow> windowedStream = keyedStream.window(TumblingProcessingTimeWindows.of(Time.seconds(30)));
//
//        // Incrementally sum the counts within each window.
//        // NOTE(review): mutates tp1 in place and returns it — relies on Flink's
//        // object-reuse semantics being safe here; confirm before reviving.
//        SingleOutputStreamOperator<Tuple2<String, Integer>> res = windowedStream.reduce(new ReduceFunction<Tuple2<String, Integer>>() {
//            @Override
//            public Tuple2<String, Integer> reduce(Tuple2<String, Integer> tp1, Tuple2<String, Integer> tp2) throws Exception {
//
//                tp1.f1 = tp1.f1 + tp2.f1;
//                return tp1;
//            }
//        });
//
////        res.print();
//        // Sink: one HBase Put per window result — row key = word,
//        // column family "base_info", qualifier "value", cell value = count.
//        res.addSink(new RichSinkFunction<Tuple2<String, Integer>>() {
//            Table tbl;
//            @Override
//            public void open(Configuration parameters) throws Exception {
//                // Acquire the HBase table once per sink task instance.
//                tbl = HbaseUtil.getTable("tb_user");
//            }
//
//            @Override
//            public void invoke(Tuple2<String, Integer> value, Context context) throws Exception {
//
//                String key = value.f0;
//                Integer values = value.f1;
//
//                // NOTE(review): getBytes() uses the platform default charset;
//                // prefer an explicit StandardCharsets.UTF_8 if this code is revived.
//                Put put = new Put(key.getBytes());
//                put.addColumn("base_info".getBytes() , "value".getBytes() , values.toString().getBytes()) ;
//                tbl.put(put);
//
//                System.out.println("写数据");
//
//            }
//
//            @Override
//            public void close() throws Exception {
//                // Release the HBase table handle when the task shuts down.
//                tbl.close();
//            }
//        });
//
//
//        env.execute();
//
//    }
//
//}
