package com.ruoze.bdp.function.impl;

import cn.hutool.core.collection.CollectionUtil;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.ruoze.bdp.constants.DataSourceConstants;
import com.ruoze.bdp.entity.TableStructVo;
import com.ruoze.bdp.entity.datasource.JDBCConnectionParamDTO;
import com.ruoze.bdp.enums.DbType;
import com.ruoze.bdp.function.DataSourceFunction;
import com.ruoze.bdp.utils.ServiceException;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;

import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Service
public class HiveDataSourceFunction implements DataSourceFunction {

    /**
     * Tests connectivity by running the configured validation query
     * (e.g. {@code select 1}) and checking the single result cell.
     *
     * @param info JSON-serialized {@link JDBCConnectionParamDTO} (url/user/password)
     * @return true when the validation query returns exactly 1
     */
    @Override
    public boolean connectTest(String info) {
        JSONArray results = query(DataSourceConstants.HIVE_VALIDATION_QUERY, info);
        // Guard: an empty result set means the connection is not usable
        // (the original code threw instead of returning false here).
        if (results == null || results.isEmpty()) {
            return false;
        }
        // Hive names an unaliased expression column "_c0"; query() upper-cases
        // all column names, hence "_C0".
        return results.getJSONObject(0).getLongValue("_C0") == 1;
    }

    /**
     * Lists all database (schema) names visible through the Hive connection.
     *
     * @param info JSON-serialized connection parameters
     * @return list of schema names (possibly empty)
     */
    @Override
    public List<String> getMetaDatabases(String info) {
        List<String> databases = new ArrayList<>();
        Connection connection = getConnection(info);
        // try-with-resources closes the ResultSet; the finally block closes the
        // Connection. (BUG FIX: the original never closed this connection.)
        try (ResultSet schemas = connection.getMetaData().getSchemas()) {
            while (schemas.next()) {
                databases.add(schemas.getString("TABLE_SCHEM"));
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        } finally {
            closeConnection(connection, null);
        }
        return databases;
    }

    /**
     * Lists the tables (type TABLE only, views excluded) of one database.
     *
     * @param info     JSON-serialized connection parameters
     * @param database schema name to list; for DatabaseMetaData#getTables this
     *                 is the {@code schemaPattern} argument (catalog is null)
     * @return table names in that schema
     */
    @Override
    public List<String> getMetaTables(String info, String database) {
        Connection connection = getConnection(info);
        List<String> tableList = new ArrayList<>();
        try {
            DatabaseMetaData metaData = connection.getMetaData();
            // getTables(catalog, schemaPattern, tableNamePattern, types):
            // catalog is usually empty for Hive; a null tableNamePattern matches
            // every table; restricting types to {"TABLE"} excludes views.
            try (ResultSet tables = metaData.getTables(null, database, null, new String[]{"TABLE"})) {
                while (tables.next()) {
                    tableList.add(tables.getString("TABLE_NAME"));
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            closeConnection(connection, null);
        }
        return tableList;
    }

    /**
     * Returns the column name/type pairs of one table, one single-entry map per
     * column, obtained via Hive's {@code DESCRIBE <table>} statement.
     *
     * @param info     JSON-serialized connection parameters
     * @param database schema name used to scope the metadata lookup
     * @param table    table whose columns are described (matched case-insensitively)
     * @return one {columnName -> typeName} map per column, in DESCRIBE order
     */
    @Override
    public List<Map<String, String>> getMetaColumns(String info, String database, String table) {
        Connection connection = getConnection(info);
        List<Map<String, String>> tableColumnsList = new ArrayList<>();
        Statement statement = null;
        try {
            statement = connection.createStatement();
            DatabaseMetaData metaData = connection.getMetaData();
            try (ResultSet tables = metaData.getTables(null, database, table, new String[]{"TABLE"})) {
                while (tables.next()) {
                    String tableName = tables.getString("TABLE_NAME");
                    if (tableName.equalsIgnoreCase(table)) {
                        // tableName came from the driver's own metadata, not from
                        // user input, so concatenating it into DESCRIBE is safe here.
                        try (ResultSet res = statement.executeQuery("describe " + tableName)) {
                            // DESCRIBE yields: col_name (1), data_type (2), comment (3).
                            while (res.next()) {
                                Map<String, String> columMap = new HashMap<>();
                                columMap.put(res.getString(1), res.getString(2));
                                tableColumnsList.add(columMap);
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            closeConnection(connection, statement);
        }
        return tableColumnsList;
    }

    /**
     * Creates a Hive database and verifies the creation by re-listing schemas.
     * <p>
     * Hive DDL reference:
     * https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL
     * {@code CREATE (DATABASE|SCHEMA) [IF NOT EXISTS] name [COMMENT ...]
     * [LOCATION hdfs_path] [MANAGEDLOCATION hdfs_path] [WITH DBPROPERTIES (...)]}
     *
     * @param info   JSON-serialized connection parameters
     * @param dbName database to create; NOTE(review): formatted straight into the
     *               DDL template — callers must ensure it is a trusted identifier
     *               (Hive JDBC offers no parameterized DDL)
     * @return true when the database shows up in the schema list afterwards
     */
    @Override
    public boolean createDatabase(String info, String dbName) {
        Connection connection = getConnection(info);
        Statement statement = null;
        try {
            String createDbSql = String.format(DataSourceConstants.JDBC_CREATE_DATABASE_EXEC_QUERY, dbName);
            statement = connection.createStatement();
            statement.executeUpdate(createDbSql);
            // Verification opens its own short-lived connection via getMetaDatabases.
            return getMetaDatabases(info).contains(dbName);
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            closeConnection(connection, statement);
        }
    }

    @Override
    public DbType getSourceType() {
        return DbType.HIVE;
    }

    /**
     * Creates a Hive table from the given structure description and verifies the
     * creation by re-listing the database's tables.
     *
     * @param info    JSON-serialized connection parameters
     * @param tableVo table definition: database/table name, column definitions
     *                and optional table options (partitions, delimiters, format)
     * @return true when the table shows up in the table list afterwards;
     *         false when the table name or column list is empty
     */
    @Override
    public boolean createTable(String info, TableStructVo tableVo) {
        String dataBaseName = tableVo.getDatabaseName().trim().toLowerCase();
        String tableName = tableVo.getTableName().trim().toLowerCase();
        List<Map<String, String>> columnDefinitionList = tableVo.getColumnDefinitionList();
        // Validate BEFORE opening a connection. (BUG FIX: the original opened
        // the connection first and leaked it on this early return.)
        if (StringUtils.isEmpty(tableName) || CollectionUtil.isEmpty(columnDefinitionList)) {
            return false;
        }
        Connection connection = getConnection(info);
        Statement statement = null;
        try {
            String createTableSql = packageCreateTableSql(tableVo);
            System.out.println("拼接的Sql：" + createTableSql);
            statement = connection.createStatement();
            statement.executeUpdate(createTableSql);
            // Verification opens its own short-lived connection via getMetaTables.
            return getMetaTables(info, dataBaseName).contains(tableName);
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            closeConnection(connection, statement);
        }
    }

    /**
     * Builds a Hive {@code CREATE TABLE} statement from a {@link TableStructVo}.
     * <p>
     * Full grammar: https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL
     * Supported options read from {@code tableVo.getTableOptions()}:
     * <ul>
     *   <li>{@code partitioned} — Map of partition column name -> type</li>
     *   <li>{@code row_format}  — field delimiter (defaults to ",")</li>
     *   <li>{@code collection}  — collection items delimiter</li>
     *   <li>{@code map}         — map keys delimiter</li>
     *   <li>{@code file_format} — STORED AS format (TEXTFILE/ORC/PARQUET/...)</li>
     * </ul>
     * Example output shape:
     * <pre>
     * CREATE TABLE IF NOT EXISTS db.people(id int, name string)
     *   PARTITIONED BY (day string) ROW FORMAT DELIMITED
     *   FIELDS TERMINATED BY "|" STORED AS textfile
     * </pre>
     *
     * @param tableVo table definition; identifiers are concatenated verbatim —
     *                callers must supply trusted names (no parameterized DDL in Hive)
     * @return the assembled CREATE TABLE statement
     */
    private String packageCreateTableSql(TableStructVo tableVo) {
        StringBuilder sql = new StringBuilder();
        sql.append("CREATE TABLE IF NOT EXISTS ")
                .append(tableVo.getDatabaseName())
                .append(".")
                .append(tableVo.getTableName())
                .append("(");
        List<Map<String, String>> columnDefinitionList = tableVo.getColumnDefinitionList();
        for (int i = 0; i < columnDefinitionList.size(); i++) {
            Map<String, String> colMap = columnDefinitionList.get(i);
            if (i > 0) {
                sql.append(",");
            }
            sql.append(colMap.get("columnName"))
                    .append(" ")
                    .append(colMap.get("columnType"));
        }
        // BUG FIX: close the column list unconditionally — the original only
        // appended ")" inside the loop, producing broken SQL for an empty list.
        sql.append(")");

        Map<String, Object> tableOptions = tableVo.getTableOptions();
        // BUG FIX: the original tested tableMap.keySet() != null, which is always
        // true and NPEs when getTableOptions() itself returns null.
        if (tableOptions != null) {
            // Partition spec: map of column name -> type.
            Object partitioned = tableOptions.get("partitioned");
            if (partitioned instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<String, String> partitionedMap = (Map<String, String>) partitioned;
                if (!partitionedMap.isEmpty()) {
                    sql.append(" PARTITIONED BY (");
                    for (Map.Entry<String, String> entry : partitionedMap.entrySet()) {
                        sql.append(entry.getKey()).append(" ").append(entry.getValue()).append(",");
                    }
                    sql.deleteCharAt(sql.length() - 1); // drop trailing comma
                    sql.append(")");
                }
            }

            // Field delimiter; defaults to "," when not provided.
            String rowFormatValue = tableOptions.getOrDefault("row_format", ",").toString();
            if (StringUtils.isNotBlank(rowFormatValue)) {
                sql.append(" ROW FORMAT DELIMITED FIELDS TERMINATED BY \"")
                        .append(rowFormatValue)
                        .append("\"");
            }
            // Delimiter between elements of collection-typed columns.
            Object collectionValue = tableOptions.get("collection");
            if (collectionValue != null) {
                sql.append(" COLLECTION ITEMS TERMINATED BY \"")
                        .append(collectionValue)
                        .append("\"");
            }
            // Delimiter between key and value of map-typed columns.
            Object mapValue = tableOptions.get("map");
            if (mapValue != null) {
                sql.append(" MAP KEYS TERMINATED BY \"")
                        .append(mapValue)
                        .append("\"");
            }
            // Storage (and implicitly compression) format, e.g. TEXTFILE/ORC/PARQUET.
            Object fileFormatValue = tableOptions.get("file_format");
            if (fileFormatValue != null) {
                sql.append(" STORED AS  ")
                        .append(fileFormatValue);
            }
        }

        return sql.toString();
    }

    /**
     * Executes a read-only SQL statement and materializes every row as a
     * JSONObject keyed by the UPPER-CASED column name.
     *
     * @param sql  statement to execute (trusted, internally built)
     * @param info JSON-serialized connection parameters
     * @return one JSONObject per row; empty array when there are no rows
     */
    private JSONArray query(String sql, String info) {
        Connection connection = getConnection(info);
        Statement statement = null;
        JSONArray results = new JSONArray();
        try {
            statement = connection.createStatement();
            try (ResultSet resultSet = statement.executeQuery(sql)) {
                ResultSetMetaData metaData = resultSet.getMetaData();
                int columnCount = metaData.getColumnCount();
                while (resultSet.next()) {
                    JSONObject jsonObject = new JSONObject();
                    for (int i = 1; i <= columnCount; i++) {
                        jsonObject.put(metaData.getColumnName(i).toUpperCase(), resultSet.getObject(i));
                    }
                    results.add(jsonObject);
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            closeConnection(connection, statement);
        }
        return results;
    }

    /**
     * Opens a Hive JDBC connection from the JSON-serialized parameters.
     *
     * @param info JSON form of {@link JDBCConnectionParamDTO}
     * @return an open connection (never null)
     * @throws ServiceException (code 200001) when the driver cannot be loaded
     *         or the connection attempt fails
     */
    private Connection getConnection(String info) {
        JDBCConnectionParamDTO paramDTO = JSONObject.parseObject(info, JDBCConnectionParamDTO.class);
        String driverName = DataSourceConstants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER;
        try {
            Class.forName(driverName);
            Connection connection =
                    DriverManager.getConnection(paramDTO.getUrl(), paramDTO.getUser(), paramDTO.getPassword());
            if (connection == null) {
                // Defensive: per JDBC spec getConnection never returns null,
                // but keep the original guard for misbehaving drivers.
                throw new ServiceException("200001", "连接" + driverName + "元数据失败");
            }
            return connection;
        } catch (Exception e) {
            throw new ServiceException("200001", "连接" + driverName + "元数据失败");
        }
    }

    /**
     * Closes the statement and then the connection.
     *
     * @param connection connection to close; may be null
     * @param statement  statement to close first; may be null
     * @throws ServiceException when either close fails
     */
    private void closeConnection(Connection connection, Statement statement) {
        try {
            if (statement != null) {
                try {
                    statement.close();
                } catch (Exception e) {
                    throw new ServiceException("200008", "Hive的statement关闭失败");
                }
            }
        } finally {
            // BUG FIX: always attempt to close the connection even when closing
            // the statement threw — the original leaked the connection in that case.
            if (connection != null) {
                try {
                    connection.close();
                } catch (Exception e) {
                    throw new ServiceException("200009", "Hive的connection关闭失败");
                }
            }
        }
    }

}

