package com.navinfo.platform.sparksql.support;

import com.navinfo.platform.sparksql.api.SparkSQLOperations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.sql.DataSource;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Utility template for executing Spark SQL queries over a JDBC {@link DataSource}.
 *
 * <p>All public query methods validate their arguments up front, acquire a
 * connection per call, and guarantee that the {@code ResultSet},
 * {@code Statement} and {@code Connection} are closed via {@code JdbcUtils}
 * regardless of outcome. Any failure is rethrown as {@code SparkSQLException}
 * with the original exception preserved as the cause.
 */
public class SparkSQLTemplate implements SparkSQLOperations {
    private static final Logger logger = LoggerFactory.getLogger(SparkSQLTemplate.class);
    private DataSource dataSource;

    /**
     * Creates a template bound to the given data source.
     *
     * @param dataSource the JDBC data source to obtain connections from; must not be null
     * @throws IllegalArgumentException if {@code dataSource} is null
     */
    public SparkSQLTemplate(DataSource dataSource) {
        // Validate the parameter directly instead of calling the overridable
        // setter/getter pair from the constructor (unsafe for subclasses).
        if (dataSource == null) {
            throw new IllegalArgumentException("Property 'dataSource' is required");
        }
        this.dataSource = dataSource;
    }

    public void setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public DataSource getDataSource() {
        return this.dataSource;
    }

    /**
     * Executes the given query and lets the extractor consume the whole result set.
     *
     * @param hql a select statement with a where clause or limit clause
     * @param rse extracts the result object from the {@code ResultSet}; must not be null
     * @return whatever the extractor produces (may be null)
     * @throws SparkSQLException on validation failure or any execution error
     */
    public <T> T query(String hql, ResultSetExtractor<T> rse) throws SparkSQLException {
        checkHql(hql);
        checkResultSetExtractor(rse);
        // null args => plain Statement path, matching this overload's contract
        return execute(hql, null, rse);
    }

    /**
     * Executes the given parameterized query and lets the extractor consume the
     * whole result set.
     *
     * @param hql  a select statement with {@code ?} placeholders
     * @param args positional bind values (bound 1-based); must not be null
     * @param rse  extracts the result object from the {@code ResultSet}; must not be null
     * @return whatever the extractor produces (may be null)
     * @throws SparkSQLException on validation failure or any execution error
     */
    public <T> T query(String hql, Object[] args, ResultSetExtractor<T> rse) throws SparkSQLException {
        checkHql(hql);
        checkArgs(args);
        checkResultSetExtractor(rse);
        return execute(hql, args, rse);
    }

    /**
     * Executes the given query and hands the result set to the callback.
     *
     * @throws SparkSQLException on validation failure or any execution error
     */
    public void query(String hql, ResultSetCallback rsCallback) throws SparkSQLException {
        // Validate here: the wrapping lambda below is never null, so the inner
        // query() cannot detect a null callback — it would surface as a deep NPE.
        checkResultSetCallback(rsCallback);
        query(hql, rs -> {
            rsCallback.processResultSet(rs);
            return null;
        });
    }

    /**
     * Executes the given parameterized query and hands the result set to the callback.
     *
     * @throws SparkSQLException on validation failure or any execution error
     */
    public void query(String hql, Object[] args, ResultSetCallback rsCallback) throws SparkSQLException {
        checkResultSetCallback(rsCallback);
        query(hql, args, rs -> {
            rsCallback.processResultSet(rs);
            return null;
        });
    }

    /**
     * Executes the query and maps the first row (if any) to an object.
     *
     * @return the mapped first row, or null when the result set is empty
     * @throws SparkSQLException on validation failure or any execution error
     */
    public <T> T queryForObject(String hql, ResultSetRowMapper<T> rowMapper) throws SparkSQLException {
        checkResultSetRowMapper(rowMapper);
        return query(hql, rs -> {
            T result = null;
            if (rs.next()) {
                result = rowMapper.mapRow(rs, 0);
            }
            return result;
        });
    }

    /**
     * Executes the parameterized query and maps the first row (if any) to an object.
     *
     * @return the mapped first row, or null when the result set is empty
     * @throws SparkSQLException on validation failure or any execution error
     */
    public <T> T queryForObject(String hql, Object[] args, ResultSetRowMapper<T> rowMapper) throws SparkSQLException {
        checkResultSetRowMapper(rowMapper);
        return query(hql, args, rs -> {
            T result = null;
            if (rs.next()) {
                result = rowMapper.mapRow(rs, 0);
            }
            return result;
        });
    }

    /**
     * Executes the query and maps every row to an element of the returned list.
     *
     * @return a list with one mapped element per row (empty when no rows)
     * @throws SparkSQLException on validation failure or any execution error
     */
    public <T> List<T> queryForList(String hql, ResultSetRowMapper<T> rowMapper) throws SparkSQLException {
        checkResultSetRowMapper(rowMapper);
        return query(hql, rs -> {
            List<T> results = new ArrayList<T>();
            int rowNum = 0;
            while (rs.next()) {
                results.add(rowMapper.mapRow(rs, rowNum++));
            }
            return results;
        });
    }

    /**
     * Executes the parameterized query and maps every row to an element of the
     * returned list.
     *
     * @return a list with one mapped element per row (empty when no rows)
     * @throws SparkSQLException on validation failure or any execution error
     */
    public <T> List<T> queryForList(String hql, Object[] args, ResultSetRowMapper<T> rowMapper) throws SparkSQLException {
        checkResultSetRowMapper(rowMapper);
        return query(hql, args, rs -> {
            List<T> results = new ArrayList<T>();
            int rowNum = 0;
            while (rs.next()) {
                results.add(rowMapper.mapRow(rs, rowNum++));
            }
            return results;
        });
    }

    /**
     * Shared execution path for both statement flavors. When {@code args} is
     * null a plain {@code Statement} is used (no-parameter overload);
     * otherwise the statement is prepared and the values bound by position.
     * Resources are always released via {@code JdbcUtils}, which tolerates nulls.
     */
    private <T> T execute(String hql, Object[] args, ResultSetExtractor<T> rse) throws SparkSQLException {
        logger.debug("Executing SQL statement：{}", hql);

        Connection con = null;
        Statement stmt = null;
        ResultSet rs = null;
        try {
            con = dataSource.getConnection();
            if (args == null) {
                stmt = con.createStatement();
                rs = stmt.executeQuery(hql);
            } else {
                PreparedStatement ps = con.prepareStatement(hql);
                stmt = ps;
                // JDBC bind parameters are 1-based
                for (int i = 0; i < args.length; i++) {
                    ps.setObject(i + 1, args[i]);
                }
                rs = ps.executeQuery();
            }
            // Extract before surfacing warnings so the extractor always runs
            // on an open ResultSet.
            T result = rse.extractData(rs);
            JdbcUtils.handleWarnings(stmt);
            return result;
        } catch (Exception ex) {
            throw new SparkSQLException(hql + "执行失败：" + ex.getMessage(), ex);
        } finally {
            JdbcUtils.closeResultSet(rs);
            JdbcUtils.closeStatement(stmt);
            JdbcUtils.closeConnection(con);
        }
    }

    /**
     * Checks that the hql is a select statement carrying a where clause or a
     * limit clause (a coarse substring check, not a real parse).
     *
     * @param hql the statement to validate
     * @throws SparkSQLException if null or not an acceptable select statement
     */
    private void checkHql(String hql) throws SparkSQLException {
        if (null == hql) {
            throw new SparkSQLException("hql不能为null");
        }

        String lowerStr = hql.toLowerCase();
        boolean flag = lowerStr.indexOf("select") > -1
                && (lowerStr.indexOf("where") > -1 || lowerStr.lastIndexOf("limit") > -1);

        // Must be a select statement with a where or limit clause.
        if (!flag) {
            throw new SparkSQLException("hql必须为select查询语句，带where条件或limit限制");
        }
    }

    private void checkArgs(Object[] args) {
        if (null == args) { throw new SparkSQLException("args不能为null"); }
    }

    private void checkResultSetExtractor(ResultSetExtractor<?> rsExtractor) {
        if (null == rsExtractor) { throw new SparkSQLException("rsExtractor不能为null"); }
    }

    private void checkResultSetCallback(ResultSetCallback rsCallback) {
        if (null == rsCallback) { throw new SparkSQLException("ResultSetCallback不能为null"); }
    }

    private void checkResultSetRowMapper(ResultSetRowMapper<?> rowMapper) {
        if (null == rowMapper) { throw new SparkSQLException("ResultSetRowMapper不能为null"); }
    }
}
