package server_timu;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import util.Kafka_util;

import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;

import static util.Kafka_util.getKafkaSource;

/*
        3、在任务1，2进行的同时，需要将order_master、order_detail、customer_login_log备份
        至Hbase中，同时建立Hive外表(表结构与离线数据ods层一致，timestamp格式可用string或者long代
        替，rowkey使用随机数（0-9）+yyyyMMddHHmmssSSS，其中对于customer_login_log缺失主键，请
        用随机数（0-9）+用户id+登陆时间代替)，同时在Hive中查询每个表前5条数据；
 */
public class T3 {
    /**
     * Job entry point: consumes the three backup topics (order_master, order_detail,
     * customer_login_log) from Kafka, prints each stream for debugging, and mirrors
     * every record into its HBase table via the sinks defined below.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job graph cannot be built or the job fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // (removed an unused local ObjectMapper — each sink builds its own parser)

        //  todo Kafka source for topic fact_order_master
        KafkaSource<String> master_source = getKafkaSource("fact_order_master");
        DataStreamSource<String> master_data = env.fromSource(
                master_source,
                WatermarkStrategy.noWatermarks(),
                "master_source"
        );
        master_data.print("master");
        //  todo write the order_master stream into HBase
        master_data.addSink(new master_to_hbase());

        //  todo Kafka source for topic fact_order_detail
        KafkaSource<String> detail_source = getKafkaSource("fact_order_detail");
        DataStreamSource<String> detail_data = env.fromSource(
                detail_source,
                WatermarkStrategy.noWatermarks(),
                "detail_source"
        );
        detail_data.print("detail");
        //  todo write the order_detail stream into HBase
        detail_data.addSink(new detail_to_hbase("flink_order_detail"));


        //  todo Kafka source for topic dim_customer_login_log
        KafkaSource<String> log_source = getKafkaSource("dim_customer_login_log");
        DataStreamSource<String> log_data = env.fromSource(
                log_source,
                WatermarkStrategy.noWatermarks(),
                "log_source"
        );
        log_data.print("log");
        //  todo write the login-log stream into HBase
        log_data.addSink(new log_to_hbase("flink_customer_login_log"));


        env.execute();
    }

    //  todo sink that mirrors order_master JSON records into HBase
    private static class master_to_hbase extends RichSinkFunction<String> {
        //  Target table name; the no-arg constructor keeps the original hard-coded
        //  default so existing callers (new master_to_hbase()) are unaffected.
        private final String tableName;
        private Connection connection;
        private Table table;
        ObjectMapper objectMapper = new ObjectMapper();
        Random random = new Random();
        //  Time component of the rowkey: yyyyMMddHHmmssSSS. One instance per sink
        //  subtask, so SimpleDateFormat's lack of thread-safety is not an issue here.
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmmssSSS");

        //  Backward-compatible default used by the existing call site in main().
        public master_to_hbase() {
            this("flink_order_master");
        }

        //  Configurable table name, consistent with detail_to_hbase / log_to_hbase.
        public master_to_hbase(String tableName) {
            this.tableName = tableName;
        }

        //  todo initialization: open the HBase connection and the table handle
        @Override
        public void open(Configuration parameters) throws Exception {
            //  create the HBase configuration object
            org.apache.hadoop.conf.Configuration configuration = HBaseConfiguration.create();
            //  ZooKeeper connection info
            //  NOTE(review): CLIENT_ZOOKEEPER_QUORUM configures a client-only ZK ensemble;
            //  clusters typically use ZOOKEEPER_QUORUM ("hbase.zookeeper.quorum") — confirm
            //  against the cluster setup before changing.
            configuration.set(HConstants.CLIENT_ZOOKEEPER_QUORUM, "192.168.40.110");
            configuration.set(HConstants.CLIENT_ZOOKEEPER_CLIENT_PORT, "2181");
            //  create the HBase connection
            connection = ConnectionFactory.createConnection(configuration);
            //  obtain the table handle used by invoke()
            table = connection.getTable(TableName.valueOf(tableName));
        }

        //  close function: release resources, null-safe in case open() failed partway
        //  (consistent with the other two sinks)
        @Override
        public void close() throws Exception {
            //  close the table handle
            if (table != null) table.close();
            //  close the HBase connection
            if (connection != null) connection.close();
        }

        // todo write logic: one Kafka JSON record -> one HBase row
        @Override
        public void invoke(String value, Context context) throws Exception {
            //  rowkey = random digit (0-9) + yyyyMMddHHmmssSSS, as the task requires;
            //  int + String concatenation already yields a String, so no toString() needed
            Put put = new Put((random.nextInt(10) + format.format(new Date())).getBytes(StandardCharsets.UTF_8));
            //  parse the record as JSON
            JsonNode jsonNode = objectMapper.readTree(value);
            //  iterator over all top-level field name/value pairs
            Iterator<Map.Entry<String, JsonNode>> fields = jsonNode.fields();
            //  each JSON field becomes one column of the same row
            while (fields.hasNext()) {
                Map.Entry<String, JsonNode> field = fields.next();
                //  field name
                String key_str = field.getKey();
                //  field value as text
                String value_str = field.getValue().asText();
                System.out.println(key_str);
                //  column family "info" must pre-exist; HBase stores raw bytes, so every
                //  string is encoded explicitly as UTF-8 (platform-default charset is
                //  environment-dependent and was a latent bug)
                put.addColumn("info".getBytes(StandardCharsets.UTF_8),
                        key_str.getBytes(StandardCharsets.UTF_8),
                        value_str.getBytes(StandardCharsets.UTF_8));
            }
            System.out.println("------------------");
            //  todo table.put() inserts/updates the assembled row
            table.put(put);
        }
    }


    //  todo sink that mirrors order_detail JSON records into HBase
    private static class detail_to_hbase extends RichSinkFunction<String> {
        //  target HBase table, supplied by the caller
        private String tableName;
        private Connection connection;
        private Table table;
        Random random = new Random();
        ObjectMapper objectMapper = new ObjectMapper();
        //  time component of the rowkey: yyyyMMddHHmmssSSS (one instance per subtask,
        //  so SimpleDateFormat's thread-unsafety is not a problem here)
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmmssSSS");

        //  todo constructor with the target table name
        public detail_to_hbase(String tableName) {
            this.tableName = tableName;
        }

        //  todo initialization: open the HBase connection and the table handle
        @Override
        public void open(Configuration parameters) throws Exception {
            //  create the HBase configuration object
            org.apache.hadoop.conf.Configuration configuration = HBaseConfiguration.create();
            //  ZooKeeper connection info
            configuration.set(HConstants.CLIENT_ZOOKEEPER_QUORUM, "192.168.40.110");
            configuration.set(HConstants.CLIENT_ZOOKEEPER_CLIENT_PORT, "2181");
            //  create the HBase connection
            connection = ConnectionFactory.createConnection(configuration);
            //  obtain the table handle used by invoke()
            table = connection.getTable(TableName.valueOf(tableName.getBytes(StandardCharsets.UTF_8)));
        }

        //  todo close function: release resources, null-safe in case open() failed
        @Override
        public void close() throws Exception {
            if (table != null) table.close();
            if (connection != null) connection.close();
        }

        //  todo write logic: one Kafka JSON record -> one HBase row
        @Override
        public void invoke(String value, Context context) throws Exception {
            //  rowkey = random digit (0-9) + yyyyMMddHHmmssSSS, as the task requires
            Put put = new Put((random.nextInt(10) + format.format(new Date())).getBytes(StandardCharsets.UTF_8));
            JsonNode jsonNode = objectMapper.readTree(value);
            Iterator<Map.Entry<String, JsonNode>> fields = jsonNode.fields();
            //  each JSON field becomes one column in the pre-existing "info" family;
            //  all strings are encoded explicitly as UTF-8 (the bare getBytes() used
            //  the platform-default charset, which is environment-dependent)
            while (fields.hasNext()) {
                Map.Entry<String, JsonNode> field = fields.next();
                String key_str = field.getKey();
                String value_str = field.getValue().asText();
                System.out.println(key_str);
                put.addColumn("info".getBytes(StandardCharsets.UTF_8),
                        key_str.getBytes(StandardCharsets.UTF_8),
                        value_str.getBytes(StandardCharsets.UTF_8));
            }
            System.out.println("-------------");
            //  write the assembled row
            table.put(put);
        }

    }


    //  todo sink that parses pipe-delimited customer_login_log lines into HBase
    private static class log_to_hbase extends RichSinkFunction<String> {
        //  target HBase table, supplied by the caller
        private String tableName;
        private Connection connection;
        private Table table;
        Random random = new Random();
        //  (removed unused ObjectMapper / SimpleDateFormat fields: this sink parses a
        //  delimited line, not JSON, and builds its rowkey from the record itself)

        //  todo constructor with the target table name
        public log_to_hbase(String tableName) {
            this.tableName = tableName;
        }


        //  todo initialization: open the HBase connection and the table handle
        @Override
        public void open(Configuration parameters) throws Exception {
            //  create the HBase configuration object
            org.apache.hadoop.conf.Configuration configuration = HBaseConfiguration.create();
            //  ZooKeeper connection info
            configuration.set(HConstants.CLIENT_ZOOKEEPER_QUORUM, "192.168.40.110");
            configuration.set(HConstants.CLIENT_ZOOKEEPER_CLIENT_PORT, "2181");
            //  create the HBase connection
            connection = ConnectionFactory.createConnection(configuration);
            //  obtain the table handle used by invoke()
            table = connection.getTable(TableName.valueOf(tableName.getBytes(StandardCharsets.UTF_8)));
        }

        //  todo close function: release resources, null-safe in case open() failed
        @Override
        public void close() throws Exception {
            if (table != null) table.close();
            if (connection != null) connection.close();
        }

        //  todo parse logic: "prefix:(id|time|ip|status)" -> one HBase row
        @Override
        public void invoke(String value, Context context) throws Exception {
            //  keep only the payload after the FIRST ':' — the split must be limited to
            //  2 parts, otherwise colons inside the login time (e.g. "12:30:45") would
            //  truncate the payload at the first inner colon (bug fix)
            String[] str = value.split(":", 2)[1]
                    .replace("(", "")
                    .replace(")", "")
                    .replace("'", "")
                    .split("\\|");
            //  a malformed line would otherwise throw ArrayIndexOutOfBoundsException below
            if (str.length < 4) {
                System.out.println("skip malformed login-log record: " + value);
                return;
            }
            //  debug print of the parsed fields
            System.out.println(str[0] + "," + str[1] + "," + str[2] + "," + str[3]);
            //  rowkey = random digit (0-9) + user id + login time, as the task requires
            //  for customer_login_log (which has no primary key of its own)
            Put put = new Put((random.nextInt(10) + str[0] + str[1]).getBytes(StandardCharsets.UTF_8));
            //  todo add every column with an explicit UTF-8 encoding (bare getBytes()
            //  used the environment-dependent platform-default charset)
            put.addColumn("info".getBytes(StandardCharsets.UTF_8), "user_id".getBytes(StandardCharsets.UTF_8), str[0].getBytes(StandardCharsets.UTF_8));
            put.addColumn("info".getBytes(StandardCharsets.UTF_8), "user_time".getBytes(StandardCharsets.UTF_8), str[1].getBytes(StandardCharsets.UTF_8));
            put.addColumn("info".getBytes(StandardCharsets.UTF_8), "user_ip".getBytes(StandardCharsets.UTF_8), str[2].getBytes(StandardCharsets.UTF_8));
            put.addColumn("info".getBytes(StandardCharsets.UTF_8), "user_status".getBytes(StandardCharsets.UTF_8), str[3].getBytes(StandardCharsets.UTF_8));
            //  todo write the assembled row
            table.put(put);
        }

    }
}
