package com.mlamp;

/**
 * @program: flink-hive
 * @description:
 * @author: gengchao
 * @create: 2020-11-30 15:57
 */


import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

import java.util.Properties;

/**
 * Streaming pipeline: Kafka topic {@code test} -> parse into {@link WxLogVo}
 * rows -> continuous insert into the Hive table {@code wx_log_orc_par} via a
 * registered {@link HiveCatalog}.
 *
 * <p>Failures are allowed to propagate out of {@code main} (which declares
 * {@code throws Exception}) instead of being swallowed and printed, so a broken
 * configuration fails the job loudly rather than silently producing nothing.
 */
public class Flink2hive {

    /**
     * Builds the Kafka consumer configuration for the source.
     *
     * @return properties pointing at a local broker; when the consumer group has
     *     no committed offset, reading starts at the latest offset
     */
    private static Properties getProperties() {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "test");
        // Key deserializer: plain string.
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Value deserializer: plain string.
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "latest");
        return props;
    }

    /**
     * Attaches the Kafka source to the environment.
     *
     * <p>{@code FlinkKafkaConsumer} construction and {@code addSource} throw no
     * checked exceptions, so the previous catch-and-return-{@code null} wrapper
     * only hid configuration errors; any runtime failure now surfaces directly.
     *
     * @param env the streaming environment to attach the source to
     * @return a single-parallelism stream of raw records from topic {@code test}
     */
    private static DataStreamSource<String> read(StreamExecutionEnvironment env) {
        return env.addSource(new FlinkKafkaConsumer<>(
                "test",                    // Kafka topic
                new SimpleStringSchema(),  // records are consumed as plain strings
                getProperties()))
                .setParallelism(1);
    }

    /**
     * Parses the raw string stream into {@link WxLogVo} rows.
     *
     * @param dataStreamSource raw records from {@link #read}
     * @return the mapped stream of log value objects
     */
    private static DataStream<WxLogVo> process(DataStreamSource<String> dataStreamSource) {
        return dataStreamSource.map(new WxLogMap());
    }

    /**
     * Registers a Hive catalog and starts a continuous INSERT of the parsed
     * stream into {@code wx_log_orc_par}.
     *
     * @param env        the streaming environment (checkpointing is enabled here
     *                   because the Hive streaming sink only commits files on
     *                   checkpoint completion)
     * @param dataStream the parsed rows to write
     */
    private static void sink(StreamExecutionEnvironment env, DataStream<WxLogVo> dataStream) {
        String name = "myhive";            // unique catalog name for registration
        String defaultDatabase = "test";   // target Hive database
        // Directory containing hive-site.xml; only the metastore address is
        // actually needed from it.
        // NOTE(review): hard-coded Windows dev path — externalize for deployment.
        String hiveConfDir = "D:\\assitsoft\\apache-hive-2.3.9-bin\\conf";
        // NOTE(review): conf dir ships Hive 2.3.9 but the declared version is
        // 2.3.4 — confirm which Hive version the metastore actually runs.
        String version = "2.3.4";

        env.enableCheckpointing(10000);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version);
        tableEnvironment.registerCatalog(name, hive);
        tableEnvironment.useCatalog(name);
        tableEnvironment.getConfig().setSqlDialect(SqlDialect.HIVE);
        tableEnvironment.useDatabase(defaultDatabase);

        // Expose the DataStream to SQL under a temporary view name.
        tableEnvironment.createTemporaryView("tmp_logs", dataStream);

        String insertSql = "insert into wx_log_orc_par SELECT recordTime, `user`, `group`, host, hostIp, dstIp, serv, app, site, tmType, upFlux, downFlux, `timeStamp`, `version`, dataType, recordDate, `location`,`ts` FROM tmp_logs";
        tableEnvironment.executeSql(insertSql);
    }

    /**
     * Wires source -> parse -> Hive sink and runs the job.
     *
     * @param args unused
     * @throws Exception if job construction or execution fails; propagated so
     *     the launcher sees a non-zero exit instead of a swallowed stack trace
     */
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStream<WxLogVo> processed = process(read(env));
        sink(env, processed);

        env.execute("test gogogog!");
    }
}