package com.navinfo.platform.etl.flink.tool;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.types.logical.utils.LogicalTypeUtils;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.Velocity;
import org.apache.velocity.app.VelocityEngine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * 通过Hive表生成对应的Entity<br/>
 * <p/>
 * 使用方式一：<br/>
 *   1、直接运行main方法，传入程序参数：--database mydb --table mytable --hiveConfDir myhivedir<br/>
 *   2、例如：--database dataplatform_qingqi<br/>
 *   3、例如：--table navinfo_parquet_0200<br/>
 *   4、例如：--hiveConfDir E:\ideaIU\IdeaProjects\dongfeng-location-bigdata\flink-applications\etl-tool\src\main\resources<br/>
 *   5、生成mytable对应的Entity文件在target目录下<br/>
 * <p/>
 * 使用方式二：<br/>
 *   1、执行maven clean package生成可执行jar包<br/>
 *   2、进入jar包目录<br/>
 *   3、java -jar etl-tool-1.208-RC47-SNAPSHOT.jar --database mydb --table mytable --hiveConfDir myhivedir<br/>
 *   4、生成mytable对应的Entity文件在jar包的当前目录下<br/>
 * @author: web
 * @date: 2020-09-08
 **/
public class TableEntityGenerator {
    private static final Logger logger = LoggerFactory.getLogger(TableEntityGenerator.class);

    /** Name under which the Hive catalog is registered with the table environment. */
    private static final String CATALOG_NAME = "hive";

    /** Tool class with a {@code main} entry point only — not meant to be instantiated. */
    private TableEntityGenerator() {
    }

    /**
     * Generates a Java entity source file for a Hive table.
     *
     * <p>Required program arguments: {@code --database <db> --table <table> --hiveConfDir <dir>}.
     * The generated {@code <Entity>.java} file is written next to the running code location
     * (the target/ directory when run from the IDE, the jar's directory when run as a jar).
     *
     * @param args program arguments, parsed via {@link ParameterTool}
     * @throws IOException        if the output directory or file cannot be written
     * @throws URISyntaxException if the code-source location cannot be converted to a path
     */
    public static void main(String[] args) throws IOException, URISyntaxException {
        ParameterTool params = ParameterTool.fromArgs(args);
        if (!params.has("database") || !params.has("table") || !params.has("hiveConfDir")) {
            logger.error("请配置程序参数database和table，格式为[--database mydb --table mytable --hiveConfDir myHiveConfDir]");
            System.exit(-1);
        }

        String dbName = params.get("database");
        String tableName = params.get("table");
        String hiveConfDir = params.get("hiveConfDir");

        StreamTableEnvironment tEnv = createTableEnvironment(dbName, hiveConfDir);
        Template template = loadTemplate();
        List<Tuple2<String, String>> columns = extractColumns(tEnv, tableName);

        // Convert the table name to an UpperCamelCase entity class name.
        String entityName = tableName.substring(0, 1).toUpperCase() + tableName.substring(1);
        entityName = ColumnTools.lineToHump(entityName);

        VelocityContext context = new VelocityContext();
        context.put("tableName", tableName);
        context.put("entityName", entityName);
        context.put("columns", columns);

        File file = resolveOutputFile(entityName);
        writeEntity(template, context, file);
    }

    /**
     * Builds a blink-planner streaming table environment wired to the Hive catalog,
     * and switches it to the requested database.
     *
     * @param dbName      Hive database to use
     * @param hiveConfDir directory containing hive-site.xml
     * @return a table environment with the Hive catalog and database selected
     */
    private static StreamTableEnvironment createTableEnvironment(String dbName, String hiveConfDir) {
        StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings envSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(sEnv, envSettings);

        tEnv.registerCatalog(CATALOG_NAME, new HiveCatalog(CATALOG_NAME, dbName, hiveConfDir));
        tEnv.useCatalog(CATALOG_NAME);
        tEnv.useDatabase(dbName);
        return tEnv;
    }

    /**
     * Configures a Velocity engine (UTF-8, classpath resource loading) and loads
     * the entity template from {@code template/TableBean.vm}.
     *
     * @return the parsed entity template
     */
    private static Template loadTemplate() {
        Properties pro = new Properties();
        pro.setProperty(Velocity.OUTPUT_ENCODING, "UTF-8");
        pro.setProperty(Velocity.INPUT_ENCODING, "UTF-8");
        pro.setProperty("resource.loader", "class");
        pro.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
        VelocityEngine velocity = new VelocityEngine(pro);
        return velocity.getTemplate("template/TableBean.vm", "UTF-8");
    }

    /**
     * Reads the table's schema and maps each column to a (javaTypeSimpleName, camelCaseFieldName)
     * pair suitable for the entity template.
     *
     * @param tEnv      table environment with the catalog/database already selected
     * @param tableName table to inspect
     * @return one tuple per column: f0 = Java type simple name, f1 = camelCase field name
     */
    private static List<Tuple2<String, String>> extractColumns(StreamTableEnvironment tEnv, String tableName) {
        TableSchema schema = tEnv.from(tableName).getSchema();
        logger.info("{}表的Schema定义为：{}", tableName, schema);

        List<TableColumn> tableColumns = schema.getTableColumns();
        List<Tuple2<String, String>> columns = new ArrayList<>(tableColumns.size());
        for (int i = 0, size = tableColumns.size(); i < size; i++) {
            TableColumn col = tableColumns.get(i);
            // Convert the snake_case column name to a camelCase field name.
            String colName = ColumnTools.lineToHump(col.getName());
            // Map the Hive logical type to its internal Java conversion class;
            // Flink's internal StringData is surfaced to callers as java.lang.String.
            Class<?> colType = LogicalTypeUtils.toInternalConversionClass(col.getType().getLogicalType());
            colType = colType.isAssignableFrom(StringData.class) ? String.class : colType;
            columns.add(Tuple2.of(colType.getSimpleName(), colName));
            logger.info("第{}个字段对应的Java属性：{} {}", i + 1, colName, colType.getSimpleName());
        }
        return columns;
    }

    /**
     * Resolves the output file {@code <entityName>.java} located next to the running
     * code source (target/ in the IDE, the jar's directory when run as a jar), creating
     * the parent directory if necessary.
     *
     * @param entityName entity class name (without the .java suffix)
     * @return the file to write the generated source into
     * @throws IOException        if the parent directory cannot be created
     * @throws URISyntaxException if the code-source location is not a valid URI
     */
    private static File resolveOutputFile(String entityName) throws IOException, URISyntaxException {
        File targetPath = new File(TableEntityGenerator.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath()).getParentFile();
        File file = new File(targetPath, entityName + ".java");
        File parent = file.getParentFile();
        // Fail fast with a clear message instead of a later FileNotFoundException.
        if (!parent.exists() && !parent.mkdirs()) {
            throw new IOException("无法创建输出目录: " + parent);
        }
        // No createNewFile() needed: FileOutputStream creates the file on open.
        return file;
    }

    /**
     * Merges the template with the context and writes the result as UTF-8.
     *
     * @param template entity template
     * @param context  Velocity context holding tableName/entityName/columns
     * @param file     destination file
     * @throws IOException if writing fails
     */
    private static void writeEntity(Template template, VelocityContext context, File file) throws IOException {
        try (FileOutputStream outStream = new FileOutputStream(file);
             BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(outStream, StandardCharsets.UTF_8))
        ) {
            template.merge(context, bufferedWriter);
            bufferedWriter.flush();
            logger.info("成功生成Java文件:{}", file.getAbsolutePath());
        }
    }
}
