package com.cetcs.kmga.dataManager.dao.h2base;


import com.cetcs.kmga.dataManager.dao.hbase.LogPage;
import com.cetcs.kmga.dataManager.global.LogQueryEhcacheOper;
import com.cetcs.kmga.dataManager.service.feature.FeatureCodeService;
import com.cetcs.kmga.dataManager.util.DruidH2Pool;
import com.cetcs.kmga.dataManager.util.H2QueryIn;
import com.cetcs.kmga.dataManager.util.IQueryConf;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import org.springframework.util.StringUtils;

import java.sql.*;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * DAO for log data cached in a per-client H2 database table.
 * <p>
 * Pages and filters cached log rows, optionally translating coded column values
 * into display text via {@link FeatureCodeService}.
 *
 * @author xutao
 * @version V1.0 created 2017-10-10 16:27
 *          Copyright 2017 by CETC
 */
@Repository
public class LogH2Query {

    private static final Logger LOGGER = LoggerFactory.getLogger(LogH2Query.class);

    /** Translates coded column values into their display values. */
    @Autowired
    private FeatureCodeService featureCodeService;

    /**
     * Queries one page of log rows from the client's H2 cache table.
     *
     * @param queryIns   filter conditions (nullable / may be empty)
     * @param sortFields column name -> sort direction (0 = DESC, otherwise ASC); nullable
     * @param columnArr  per output column: [0] source column name, [1] "1" if the value is a
     *                   code needing translation, [2] code type id (only read when [1] == "1")
     * @param page       1-based page number
     * @param pageSize   rows per page (must be &gt; 0)
     * @param ipServ     client ip used to derive the cache table name
     * @return a {@link LogPage} holding the page content, total record count and page count;
     *         content is {@code null} if the query failed
     */
    public LogPage queryCacheDataFromTb(List<H2QueryIn> queryIns, Map<String, Integer> sortFields, List<List<String>> columnArr, int page, int pageSize, String ipServ) {
        LogPage logRet = new LogPage();
        logRet.setPageNumber(page);
        logRet.setPageSize(pageSize);
        List<List<String>> ret = null;
        int totalRecord = 0;
        String tbName = LogQueryEhcacheOper.makeH2TbName(ipServ);
        // NOTE(review): column names and keyword values are concatenated into the SQL text.
        // They presumably come from trusted internal query configuration (QURYY_TYPE_DEFINED
        // injects raw SQL by design, so full parameterization is impossible) — confirm that
        // none of these values can originate from end-user input.
        StringBuilder dataSql = new StringBuilder(" select  * from  ").append(tbName);
        StringBuilder countSql = new StringBuilder(" select  count(*) from  ").append(tbName);
        appendWhereClause(queryIns, dataSql, countSql);
        appendOrderByClause(sortFields, dataSql);
        // limit <offset> , <size>
        dataSql.append(" limit ").append((page - 1) * pageSize).append(" , ").append(pageSize);
        Connection conn = null;
        try {
            conn = DruidH2Pool.getInstance().getConnection();
            List<Map<String, String>> mapList = fetchRows(conn, dataSql.toString());
            ret = convertRows(mapList, columnArr);
            totalRecord = fetchCount(conn, countSql.toString());
        } catch (SQLException e) {
            LOGGER.error("查询数据失败：" + e.getMessage(), e);
        } finally {
            DruidH2Pool.getInstance().close(conn);
        }
        logRet.setContent(ret);
        logRet.setTotalRecord(totalRecord);
        // BUG FIX: the original computed the page count from `totalRecord % pageSize`
        // (the remainder) instead of dividing; this is ceiling division.
        int pageCount = (totalRecord + pageSize - 1) / pageSize;
        logRet.setTotalPageCount(pageCount);
        logRet.setColumnArr(columnArr);
        return logRet;
    }

    /** Appends the identical WHERE clause (if any conditions exist) to both SQL builders. */
    private void appendWhereClause(List<H2QueryIn> queryIns, StringBuilder dataSql, StringBuilder countSql) {
        if (queryIns == null || queryIns.isEmpty()) {
            return;
        }
        dataSql.append(" WHERE ");
        countSql.append(" WHERE ");
        for (int i = 0; i < queryIns.size(); i++) {
            if (i != 0) {
                dataSql.append(" and ");
                countSql.append(" and ");
            }
            // The data query and the count query share the exact same condition text.
            String fragment = buildCondition(queryIns.get(i));
            dataSql.append(fragment);
            countSql.append(fragment);
        }
    }

    /** Builds a single SQL condition fragment for one query input; empty for unknown types. */
    private String buildCondition(H2QueryIn queryIn) {
        StringBuilder sb = new StringBuilder();
        switch (queryIn.getQueryType()) {
            // equality: col = 'value'
            case IQueryConf.QUERY_TYPE_EQUAL:
                sb.append(" ").append(queryIn.getColumnName()).append(" = '").append(queryIn.getKeywords()).append("'");
                break;
            // membership: col in( 'v1','v2' ); keywords may be a List, String[] or a
            // pre-built comma-separated String
            case IQueryConf.QURYY_TYPE_IN:
                String inStr = buildInList(queryIn.getKeywords());
                if (inStr != null) {
                    sb.append(" ").append(queryIn.getColumnName()).append(" in( ").append(inStr).append(" )");
                }
                break;
            // substring match: col like '%value%'
            case IQueryConf.QUERY_TYPE_LIKE:
                sb.append(" ").append(queryIn.getColumnName()).append(" like '%").append(queryIn.getKeywords()).append("%'");
                break;
            // caller-supplied raw SQL fragment, appended verbatim
            case IQueryConf.QURYY_TYPE_DEFINED:
                sb.append(" ").append(queryIn.getKeywords());
                break;
            default:
                break;
        }
        return sb.toString();
    }

    /**
     * Renders the IN-list values as {@code 'a','b','c'}.
     *
     * @return the rendered list, the raw string if keywords is already a String,
     *         or {@code null} for unsupported keyword types (condition is then skipped)
     */
    private String buildInList(Object keywords) {
        if (keywords instanceof List) {
            @SuppressWarnings("unchecked")
            List<Object> arrList = (List<Object>) keywords;
            StringBuilder in = new StringBuilder();
            for (int j = 0; j < arrList.size(); j++) {
                if (j != 0) {
                    in.append(",");
                }
                in.append("'").append(arrList.get(j)).append("'");
            }
            return in.toString();
        }
        if (keywords instanceof String[]) {
            String[] arr = (String[]) keywords;
            StringBuilder in = new StringBuilder();
            for (int j = 0; j < arr.length; j++) {
                if (j != 0) {
                    in.append(",");
                }
                in.append("'").append(arr[j]).append("'");
            }
            return in.toString();
        }
        if (keywords instanceof String) {
            // Assumed to already be a properly quoted, comma-separated list.
            return (String) keywords;
        }
        return null;
    }

    /** Appends an ORDER BY clause to the data query; direction 0 means DESC, otherwise ASC. */
    private void appendOrderByClause(Map<String, Integer> sortFields, StringBuilder dataSql) {
        if (sortFields == null || sortFields.isEmpty()) {
            return;
        }
        dataSql.append(" ORDER BY ");
        int idx = 0;
        for (Map.Entry<String, Integer> entry : sortFields.entrySet()) {
            dataSql.append(idx == 0 ? " " : " , ");
            dataSql.append(entry.getKey().toUpperCase());
            dataSql.append(entry.getValue() == 0 ? " DESC " : " ASC ");
            idx++;
        }
    }

    /**
     * Executes the data query and materializes every row as a column-name -> value map.
     * <p>
     * FIX: the original reused one PreparedStatement/ResultSet variable pair for two
     * queries and only closed the second — try-with-resources closes each pair reliably.
     */
    private List<Map<String, String>> fetchRows(Connection conn, String sql) throws SQLException {
        List<Map<String, String>> mapList = Lists.newArrayList();
        try (PreparedStatement ps = conn.prepareStatement(sql);
             ResultSet rs = ps.executeQuery()) {
            ResultSetMetaData meta = rs.getMetaData();
            while (rs.next()) {
                Map<String, String> row = new HashMap<>();
                for (int i = 1; i <= meta.getColumnCount(); i++) { // JDBC columns are 1-based
                    String c = meta.getColumnName(i);
                    row.put(c, rs.getString(c));
                }
                mapList.add(row);
            }
        }
        return mapList;
    }

    /** Executes the count query and sums the first column over all returned rows. */
    private int fetchCount(Connection conn, String sql) throws SQLException {
        int total = 0;
        try (PreparedStatement ps = conn.prepareStatement(sql);
             ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                total += rs.getInt(1);
            }
        }
        return total;
    }

    /** Projects raw row maps onto the requested output columns, translating coded values. */
    private List<List<String>> convertRows(List<Map<String, String>> mapList, List<List<String>> columnArr) {
        return mapList.stream().map(row -> {
            List<String> singleList = Lists.newArrayList();
            for (List<String> col : columnArr) {
                singleList.add(decodeValue(row.get(col.get(0)), col));
            }
            return singleList;
        }).collect(Collectors.toList());
    }

    /**
     * Returns the display value for one cell: "" for empty values, the code translation
     * when the column descriptor flags the value as coded ("1"), otherwise the raw value.
     */
    private String decodeValue(String val, List<String> col) {
        if (StringUtils.isEmpty(val)) {
            return "";
        }
        if ("1".equals(col.get(1))) {
            return featureCodeService.acquireValueByCodeAndTypeId(col.get(2), val);
        }
        return val;
    }

    /**
     * Creates the H2 cache table for the given client.
     *
     * @param columnArr column descriptors for the table
     * @param clientIp  client ip used to derive the table name
     */
    public void createLogCacheTb(List<List<String>> columnArr, String clientIp) {
        DruidH2Pool.getInstance().createCacheTb(LogQueryEhcacheOper.makeH2TbName(clientIp), columnArr);
    }

    /**
     * Batch-inserts cache data on a background thread.
     *
     * @param columnArr   column names
     * @param data        row values
     * @param clientIp    client ip used to derive the table name
     * @param isNotExport forwarded flag controlling export behavior of the pool helper
     */
    public void createBathLogDataUseThread(List<String> columnArr, List<List<String>> data, String clientIp, boolean isNotExport) {
        DruidH2Pool.getInstance().createBathDataUseThread(LogQueryEhcacheOper.makeH2TbName(clientIp), columnArr, data, isNotExport);
    }

    /**
     * Fetches a single log row by key from the H2 cache table.
     *
     * @param columnName key column name (concatenated into SQL — must be trusted)
     * @param keyword    key value (bound as a parameter)
     * @param columnArr  output column descriptors, same layout as in
     *                   {@code queryCacheDataFromTb}
     * @param ipServ     client ip used to derive the table name
     * @return the row projected onto the requested columns, or {@code null} when no row
     *         matches or the query fails
     */
    public Map<String, String> queryLogDetailById(String columnName, String keyword, List<List<String>> columnArr, String ipServ) {
        Connection conn = null;
        Map<String, String> ret = null;
        // NOTE(review): columnName cannot be bound as a JDBC parameter; presumably it comes
        // from internal configuration — confirm it is never user-controlled.
        String querySql = "select * from " + LogQueryEhcacheOper.makeH2TbName(ipServ) + " where " + columnName + " =? ";
        try {
            conn = DruidH2Pool.getInstance().getConnection();
            try (PreparedStatement preparedStatement = conn.prepareStatement(querySql)) {
                preparedStatement.setString(1, keyword);
                try (ResultSet rs = preparedStatement.executeQuery()) {
                    // BUG FIX: the original ignored the rs.next() result and read columns
                    // unconditionally, throwing SQLException on an empty result set.
                    if (!rs.next()) {
                        return null;
                    }
                    ResultSetMetaData meta = rs.getMetaData();
                    Map<String, String> row = new HashMap<>();
                    for (int i = 1; i <= meta.getColumnCount(); i++) { // JDBC columns are 1-based
                        String c = meta.getColumnName(i);
                        row.put(c, rs.getString(c));
                    }
                    ret = new HashMap<>();
                    for (List<String> col : columnArr) {
                        ret.put(col.get(0), decodeValue(row.get(col.get(0)), col));
                    }
                }
            }
        } catch (SQLException e) {
            // BUG FIX: the original logged "保存数据出错" ("save failed") in this query method.
            LOGGER.error("查询日志详情失败：" + e.getMessage(), e);
        } finally {
            DruidH2Pool.getInstance().close(conn);
        }
        return ret;
    }

}
