/*
 * Copyright 2019 WeBank
 * Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.webank.wedatasphere.linkis.metadata.service.impl;

import com.google.common.collect.Maps;
import com.webank.wedatasphere.linkis.common.utils.ByteTimeUtils;
import com.webank.wedatasphere.linkis.hadoop.common.utils.HDFSUtils;
import com.webank.wedatasphere.linkis.metadata.hive.config.DSEnum;
import com.webank.wedatasphere.linkis.metadata.hive.config.DataSource;
import com.webank.wedatasphere.linkis.metadata.hive.dao.HiveMetaDao;
import com.webank.wedatasphere.linkis.metadata.service.DataSourceService;
import com.webank.wedatasphere.linkis.metadata.util.Constants;
import com.webank.wedatasphere.linkis.metadata.util.JdbcUtils;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;
import org.codehaus.jackson.node.ObjectNode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.sql.*;
import java.util.*;

@Service
public class DataSourceServiceImpl implements DataSourceService {

    private static final Logger logger = Logger.getLogger(DataSourceServiceImpl.class);

    /**
     * Lazily-created HDFS handle shared by all instances. Declared volatile so the
     * double-checked locking in {@link #getRootHdfs()} is correct (the original
     * synchronized on {@code this} for a static field, which does not exclude
     * other instances, and the field was not volatile).
     */
    private static volatile FileSystem rootHdfs = null;

    @Autowired
    HiveMetaDao hiveMetaDao;

    // Reused for all JSON node construction; codehaus ObjectMapper node factories are thread-safe.
    ObjectMapper jsonMapper = new ObjectMapper();

    /**
     * Lists the databases visible to a user.
     * <p>
     * For a JDBC source the database name is derived from the JDBC URL (or, for
     * Oracle, the upper-cased JDBC user). For Hive, every database returned by the
     * metastore is emitted — the original implementation overwrote a single node's
     * {@code dbName} inside the loop, so only the last database survived.
     *
     * @param userName     user whose Hive databases are listed (Hive path only)
     * @param dbType       source type; compared against {@link Constants#JDBC}
     * @param jdbcUsername JDBC user (JDBC path only)
     * @param jdbcPassword JDBC password (JDBC path only)
     * @param jdbcUrl      JDBC URL; the db name is parsed out of it
     * @return array of objects each carrying a {@code dbName} field
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public JsonNode getDbs(String userName, String dbType, String jdbcUsername, String jdbcPassword, String jdbcUrl) throws Exception {
        ArrayNode dbsNode = jsonMapper.createArrayNode();
        if (Constants.JDBC.equals(dbType)) {
            // JdbcUtils opens a real connection here purely as a connectivity probe.
            Connection conn = JdbcUtils.getDatabaseMetaData(dbType, jdbcUsername, jdbcPassword, jdbcUrl);
            if (null != conn) {
                try {
                    ObjectNode dbNode = jsonMapper.createObjectNode();
                    if (jdbcUrl.contains(Constants.GREENPLUM)) {
                        // Greenplum URLs carry the db name after the last '='.
                        dbNode.put("dbName", jdbcUrl.substring(jdbcUrl.lastIndexOf("=") + 1));
                    } else if (jdbcUrl.contains(Constants.ORACLE)) {
                        // Oracle schemas are conventionally the upper-cased user name.
                        dbNode.put("dbName", jdbcUsername.toUpperCase());
                    } else {
                        dbNode.put("dbName", jdbcUrl.substring(jdbcUrl.lastIndexOf("/") + 1));
                    }
                    dbsNode.add(dbNode);
                } finally {
                    // The original leaked this connection.
                    closeQuietly(conn);
                }
            }
        } else {
            List<String> dbs = hiveMetaDao.getDbsByUser(userName);
            for (String db : dbs) {
                // One node per database (fixes the overwrite bug).
                ObjectNode dbNode = jsonMapper.createObjectNode();
                dbNode.put("dbName", db);
                dbsNode.add(dbNode);
            }
        }
        return dbsNode;
    }

    /**
     * Lists databases together with their tables.
     * <p>
     * NOTE(review): the JDBC branch still uses hard-coded debug connection details
     * (127.0.0.1:3307/dss, root/root) and ignores the {@code jdbcUsername}/
     * {@code jdbcPassword} parameters except for parsing the schema out of
     * {@code jdbcUrl}. Kept to preserve behavior, but this should come from
     * configuration — TODO confirm with the original author.
     *
     * @return array of {@code {databaseName, tables: [...]}} nodes
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public JsonNode getDbsWithTables(String userName, String dbType, String jdbcUsername, String jdbcPassword, String jdbcUrl)
            throws Exception {
        ArrayNode dbNodes = jsonMapper.createArrayNode();
        if ("jdbc".equals(dbType)) {
            String driver = "com.mysql.jdbc.Driver";
            String url = "jdbc:mysql://127.0.0.1:3307/dss";
            String user = "root";
            String pwd = "root";
            Class.forName(driver);
            // Parameterized query: the schema name is taken from jdbcUrl and was
            // previously concatenated into the SQL string (injection risk).
            String sql = "SELECT table_name FROM information_schema.TABLES WHERE table_schema = ? ORDER BY table_name";
            ObjectNode dbNode = jsonMapper.createObjectNode();
            try (Connection conn = DriverManager.getConnection(url, user, pwd);
                 PreparedStatement stmt = conn.prepareStatement(sql)) {
                stmt.setString(1, jdbcUrl.substring(jdbcUrl.lastIndexOf("/") + 1));
                try (ResultSet rs = stmt.executeQuery()) {
                    dbNode.put("databaseName", "dss");
                    ArrayNode tables = jsonMapper.createArrayNode();
                    while (rs.next()) {
                        ObjectNode tableNode = jsonMapper.createObjectNode();
                        tableNode.put("tableName", rs.getString("table_name"));
                        tableNode.put("isView", "false");
                        tableNode.put("databaseName", "dss");
                        tableNode.put("createdBy", "dss");
                        tableNode.put("createdAt", "dss");
                        tableNode.put("lastAccessAt", "dss");
                        tables.add(tableNode);
                    }
                    dbNode.put("tables", tables);
                    dbNodes.add(dbNode);
                }
            } catch (SQLException e) {
                // Best-effort: an unreachable JDBC source yields an empty result,
                // matching the original printStackTrace-and-continue behavior.
                logger.error("Failed to list JDBC tables:", e);
            }
        } else {
            List<String> dbs = hiveMetaDao.getDbsByUser(userName);
            for (String db : dbs) {
                ObjectNode dbNode = jsonMapper.createObjectNode();
                dbNode.put("databaseName", db);
                dbNode.put("tables", queryTables(db, userName, "", "", "", ""));
                dbNodes.add(dbNode);
            }
        }
        return dbNodes;
    }

    /**
     * Lists the tables of a database.
     * <p>
     * JDBC path: reads {@link DatabaseMetaData#getTables}. For Greenplum/PostgreSQL
     * the result is grouped per schema ({@code {databaseName, tables}} nodes);
     * otherwise flat {@code {tableName, isView, schemName, createdBy}} nodes.
     * Hive path: reads the metastore via {@link HiveMetaDao}.
     *
     * @return array node; shape depends on the source type as described above
     * @throws RuntimeException if the Hive metastore lookup fails
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public JsonNode queryTables(String database, String userName, String jdbcUsername, String jdbcPassword, String jdbcUrl, String dbType) {
        ArrayNode tables = jsonMapper.createArrayNode();
        if (Constants.JDBC.equals(dbType)) {
            // JdbcUtils opens a probe connection; the original reassigned the
            // variable via DriverManager and leaked this one.
            Connection probe = JdbcUtils.getDatabaseMetaData(dbType, jdbcUsername, jdbcPassword, jdbcUrl);
            if (null != probe) {
                closeQuietly(probe);
                String[] tableTypes = new String[]{"TABLE"};
                try (Connection conn = DriverManager.getConnection(jdbcUrl, jdbcUsername, jdbcPassword)) {
                    DatabaseMetaData databaseMetaData = conn.getMetaData();
                    // Oracle only exposes tables under the user's own schema.
                    String schemaPattern = jdbcUrl.contains(Constants.ORACLE) ? jdbcUsername.toUpperCase() : "%";
                    try (ResultSet rs = databaseMetaData.getTables(null, schemaPattern, "%", tableTypes)) {
                        if (jdbcUrl.contains(Constants.GREENPLUM) || jdbcUrl.contains(Constants.POSTGRESQL)) {
                            // First pass: collect the distinct schema names.
                            List<String> schemas = new ArrayList<>();
                            while (rs.next()) {
                                String schemaName = rs.getString("table_schem");
                                if (!schemas.contains(schemaName)) {
                                    schemas.add(schemaName);
                                }
                            }
                            // Second pass: one {databaseName, tables} node per schema.
                            for (String schema : schemas) {
                                ObjectNode schemaNode = jsonMapper.createObjectNode();
                                ArrayNode schemaTables = jsonMapper.createArrayNode();
                                try (ResultSet schemaRs = databaseMetaData.getTables(null, schema, "%", tableTypes)) {
                                    while (schemaRs.next()) {
                                        ObjectNode tableNode = jsonMapper.createObjectNode();
                                        tableNode.put("tableName", schemaRs.getString("table_name"));
                                        schemaTables.add(tableNode);
                                    }
                                }
                                schemaNode.put("databaseName", schema);
                                schemaNode.put("tables", schemaTables);
                                tables.add(schemaNode);
                            }
                        } else {
                            while (rs.next()) {
                                ObjectNode tableNode = jsonMapper.createObjectNode();
                                tableNode.put("tableName", rs.getString("table_name"));
                                tableNode.put("isView", "false");
                                tableNode.put("schemName", rs.getString("table_schem"));
                                tableNode.put("createdBy", "root");
                                tables.add(tableNode);
                            }
                        }
                    }
                } catch (SQLException e) {
                    // Best-effort: matches the original print-and-continue behavior.
                    logger.error("Failed to query JDBC tables:", e);
                }
            }
        } else {
            List<Map<String, Object>> listTables;
            try {
                Map<String, String> map = Maps.newHashMap();
                map.put("dbName", database);
                map.put("userName", userName);
                listTables = hiveMetaDao.getTablesByDbNameAndUser(map);
            } catch (Throwable e) {
                logger.error("Failed to list Tables:", e);
                throw new RuntimeException(e);
            }
            for (Map<String, Object> table : listTables) {
                ObjectNode tableNode = jsonMapper.createObjectNode();
                tableNode.put("tableName", (String) table.get("NAME"));
                tableNode.put("isView", "VIRTUAL_VIEW".equals(table.get("TYPE")));
                tableNode.put("databaseName", database);
                tableNode.put("createdBy", (String) table.get("OWNER"));
                tableNode.put("createdAt", (Integer) table.get("CREATE_TIME"));
                tableNode.put("lastAccessAt", (Integer) table.get("LAST_ACCESS_TIME"));
                tables.add(tableNode);
            }
        }
        return tables;
    }

    /**
     * Describes the columns (and Hive partition keys) of a table.
     * <p>
     * JDBC path: reads column name/type from {@link DatabaseMetaData#getColumns}.
     * Hive path: reads the metastore. In both cases partition keys are then read
     * from the Hive metastore — NOTE(review): presumably a Hive-only concern that
     * is harmless for JDBC sources; confirm with callers.
     *
     * @return array of {@code {columnName, columnType, columnComment, partitioned}} nodes
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public JsonNode queryTableMeta(String dbName, String tableName, String userName, String jdbcUsername, String jdbcPassword, String jdbcUrl, String dbType) {
        logger.info("getTable:" + userName);
        Map<String, String> param = Maps.newHashMap();
        param.put("dbName", dbName);
        param.put("tableName", tableName);
        // Never null: the original left this null when the JDBC connection could
        // not be opened and then NPE'd iterating it.
        List<Map<String, Object>> columns = new ArrayList<>();
        if (Constants.JDBC.equals(dbType)) {
            Connection probe = JdbcUtils.getDatabaseMetaData(dbType, jdbcUsername, jdbcPassword, jdbcUrl);
            if (null != probe) {
                closeQuietly(probe); // original leaked this probe connection
                try (Connection conn = DriverManager.getConnection(jdbcUrl, jdbcUsername, jdbcPassword);
                     ResultSet rs = conn.getMetaData().getColumns(null, "%", tableName, "%")) {
                    while (rs.next()) {
                        Map<String, Object> column = new HashMap<>();
                        column.put("COLUMN_NAME", rs.getString("COLUMN_NAME"));
                        column.put("TYPE_NAME", rs.getString("TYPE_NAME"));
                        columns.add(column);
                    }
                } catch (SQLException e) {
                    // Best-effort: matches the original print-and-continue behavior.
                    logger.error("Failed to read JDBC columns:", e);
                }
            }
        } else {
            columns = hiveMetaDao.getColumns(param);
        }
        ArrayNode columnsNode = jsonMapper.createArrayNode();
        for (Map<String, Object> column : columns) {
            ObjectNode fieldNode = jsonMapper.createObjectNode();
            fieldNode.put("columnName", (String) column.get("COLUMN_NAME"));
            fieldNode.put("columnType", (String) column.get("TYPE_NAME"));
            fieldNode.put("columnComment", (String) column.get("COMMENT"));
            fieldNode.put("partitioned", false);
            columnsNode.add(fieldNode);
        }
        // Partition keys are appended after the regular columns, flagged with partitioned=true.
        List<Map<String, Object>> partitionKeys = hiveMetaDao.getPartitionKeys(param);
        for (Map<String, Object> partitionKey : partitionKeys) {
            ObjectNode fieldNode = jsonMapper.createObjectNode();
            fieldNode.put("columnName", (String) partitionKey.get("PKEY_NAME"));
            fieldNode.put("columnType", (String) partitionKey.get("PKEY_TYPE"));
            fieldNode.put("columnComment", (String) partitionKey.get("PKEY_COMMENT"));
            fieldNode.put("partitioned", true);
            columnsNode.add(fieldNode);
        }
        return columnsNode;
    }

    /**
     * Resolves the HDFS storage location of a Hive table from the metastore.
     *
     * @param database  Hive database name
     * @param tableName Hive table name
     * @return the table's location URI as recorded in the metastore
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    public String getTableLocation(String database, String tableName) {
        Map<String, String> param = Maps.newHashMap();
        param.put("dbName", database);
        param.put("tableName", tableName);
        String tableLocation = hiveMetaDao.getLocationByDbAndTable(param);
        logger.info("tableLocation:" + tableLocation);
        return tableLocation;
    }

    /**
     * Computes the on-HDFS size of a table (directory content summary, or the
     * single file's length). On I/O failure the size is returned as an empty
     * string rather than propagating the error.
     *
     * @return {@code {size, tableName: "db.table"}} node
     */
    @Override
    public JsonNode getTableSize(String dbName, String tableName, String userName) {
        logger.info("getTable:" + userName);
        String tableSize = "";
        try {
            FileStatus tableFile = getRootHdfs().getFileStatus(new Path(this.getTableLocation(dbName, tableName)));
            if (tableFile.isDirectory()) {
                tableSize = ByteTimeUtils.bytesToString(getRootHdfs().getContentSummary(tableFile.getPath()).getLength());
            } else {
                tableSize = ByteTimeUtils.bytesToString(tableFile.getLen());
            }
        } catch (IOException e) {
            logger.error("getTableSize error:", e);
        }
        ObjectNode sizeJson = jsonMapper.createObjectNode();
        sizeJson.put("size", tableSize);
        sizeJson.put("tableName", dbName + "." + tableName);
        return sizeJson;
    }

    /**
     * Reads a single partition's size from the Hive metastore; a missing
     * partition is reported as 0 bytes.
     *
     * @return {@code {size, tableName: "db.table", partitionName}} node
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public JsonNode getPartitionSize(String dbName, String tableName, String partitionName, String userName) {
        Map<String, String> map = Maps.newHashMap();
        map.put("dbName", dbName);
        map.put("tableName", tableName);
        map.put("partitionName", partitionName);
        map.put("userName", userName);
        Long partitionSize = hiveMetaDao.getPartitionSize(map);
        if (partitionSize == null) {
            partitionSize = 0L;
        }
        ObjectNode sizeJson = jsonMapper.createObjectNode();
        sizeJson.put("size", ByteTimeUtils.bytesToString(partitionSize));
        sizeJson.put("tableName", dbName + "." + tableName);
        sizeJson.put("partitionName", partitionName);
        return sizeJson;
    }

    /**
     * Returns the table's partitions as a tree keyed by the '/'-separated
     * partition path segments (e.g. {@code ds=2020/hr=01}), newest first.
     * <p>
     * The tree depth is taken from the FIRST partition string — NOTE(review):
     * partitions with fewer segments than the first would overrun the label
     * array; presumably Hive guarantees uniform depth per table — confirm.
     *
     * @return {@code {tableName, isPartition, partitions: [...]}} node
     */
    @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
    @Override
    public JsonNode getPartitions(String dbName, String tableName, String userName) {
        Map<String, String> map = Maps.newHashMap();
        map.put("dbName", dbName);
        map.put("tableName", tableName);
        List<String> partitions = hiveMetaDao.getPartitions(map);
        // Sort ascending then reverse: newest partition names first.
        Collections.sort(partitions);
        Collections.reverse(partitions);

        ObjectNode partitionJson = jsonMapper.createObjectNode();
        partitionJson.put("tableName", dbName + "." + tableName);
        if (CollectionUtils.isEmpty(partitions)) {
            partitionJson.put("isPartition", false);
        } else {
            partitionJson.put("isPartition", true);
            partitionJson.put("partitions", jsonMapper.createArrayNode());
            int depth = StringUtils.countMatches(partitions.get(0), "/");
            // Maps a partition path prefix to its already-created tree node so
            // shared prefixes are emitted only once.
            Map<String, JsonNode> nameToNode = Maps.newHashMap();
            for (String partition : partitions) {
                String[] lables = StringUtils.split(partition, "/");
                for (int i = 0; i <= depth; i++) {
                    if (i == 0) {
                        // Root level: attach directly under "partitions".
                        if (!nameToNode.containsKey(lables[i])) {
                            ObjectNode childJson = jsonMapper.createObjectNode();
                            childJson.put("label", lables[i]);
                            childJson.put("path", lables[i]);
                            childJson.put("children", jsonMapper.createArrayNode());
                            nameToNode.put(lables[i], childJson);
                            ((ArrayNode) partitionJson.get("partitions")).add(childJson);
                        }
                    } else {
                        // Deeper levels: attach under the parent prefix's node.
                        String parentPath = StringUtils.join(Arrays.copyOfRange(lables, 0, i), "/");
                        String currentPath = StringUtils.join(Arrays.copyOfRange(lables, 0, i + 1), "/");
                        if (!nameToNode.containsKey(currentPath)) {
                            ObjectNode childJson = jsonMapper.createObjectNode();
                            childJson.put("label", lables[i]);
                            childJson.put("path", currentPath);
                            childJson.put("children", jsonMapper.createArrayNode());
                            nameToNode.put(currentPath, childJson);
                            ((ArrayNode) nameToNode.get(parentPath).get("children")).add(childJson);
                        }
                    }
                }
            }
        }
        return partitionJson;
    }

    /**
     * Returns the shared root-user HDFS FileSystem, creating it on first use.
     * Correct double-checked locking: the field is volatile and the lock is the
     * class object (the field is static; the original locked on {@code this}).
     */
    private FileSystem getRootHdfs() {
        if (rootHdfs == null) {
            synchronized (DataSourceServiceImpl.class) {
                if (rootHdfs == null) {
                    rootHdfs = HDFSUtils.getHDFSRootUserFileSystem();
                }
            }
        }
        return rootHdfs;
    }

    /** Best-effort close of a JDBC connection; logs and continues on failure. */
    private static void closeQuietly(Connection conn) {
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                logger.warn("Failed to close JDBC connection:", e);
            }
        }
    }

}
