package com.saga.energy.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.saga.energy.bean.Energy15;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

import java.util.Properties;

public class EnergyKafkaToHive {

    /**
     * Flink streaming job: consumes 15-minute energy readings (JSON) from the
     * Kafka topic {@code ENERGY_15_MIN}, parses each record into an
     * {@link Energy15}, derives the {@code dt} partition field from
     * {@code date_time}, registers the stream as the temporary table
     * {@code energyData}, and prints a SQL projection of it. The Hive catalog /
     * insert wiring is currently disabled and kept below for reference.
     *
     * @param args unused
     * @throws Exception propagated from Flink job submission
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka consumer configuration; broker host names are expected to
        // resolve via /etc/hosts (hadoop01..03).
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop01:9092,hadoop02:9092,hadoop03:9092");
        properties.setProperty("group.id", "energy1");
        properties.setProperty("auto.offset.reset", "earliest");

        DataStreamSource<String> energy15MinSource =
                env.addSource(new FlinkKafkaConsumer<>("ENERGY_15_MIN", new SimpleStringSchema(), properties));

        // Anonymous class (not a lambda) so Flink's type extractor can see the
        // Energy15 output type without an explicit .returns(...) hint.
        SingleOutputStreamOperator<Energy15> energy15Stream = energy15MinSource.map(new MapFunction<String, Energy15>() {
            @Override
            public Energy15 map(String s) {
                // NOTE(review): JSON.parseObject returns null for empty input and
                // throws on malformed JSON; a bad record currently fails the job.
                Energy15 energy = JSON.parseObject(s, Energy15.class);
                // Derive the date partition by stripping dashes, e.g.
                // "2023-01-02 ..." -> "20230102 ...". Plain replace() suffices:
                // no regex is needed, and replaceAll() would compile a Pattern
                // for every record.
                energy.setDt(energy.getDate_time().replace("-", ""));
                System.out.println(energy); // debug output; println invokes toString() itself
                return energy;
            }
        });

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        tableEnv.createTemporaryView("energyData", energy15Stream);

        // Hive sink wiring, disabled for now. Re-enable to write into
        // saga_dw.ods_energy_15_min:
        //   HiveCatalog hive = new HiveCatalog("hive", "saga_dw", "/opt/module/hive-3.1.2/conf", "3.1.2");
        //   tableEnv.registerCatalog("hive", hive);
        //   tableEnv.useCatalog("hive");
        //   tableEnv.useDatabase("saga_dw");
        //   tableEnv.executeSql("insert into ods_energy_15_min select project_id, build_id, date_time, dt from energyData");

        // NOTE(review): on an unbounded stream this print() blocks for the
        // lifetime of the query, so env.execute() below is never reached;
        // the select triggers its own job execution.
        tableEnv.executeSql("select project_id, build_id, date_time, dt from energyData").print();

        env.execute("energy :");
    }
}
