package co.sugon.log.searchEngineService;


import co.sugon.log.searchEngineUtils.SparkJdbcUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * ClassName:   SparkSQLEngineService.java
 * <p>
 */
/**
 * Spark-backed implementation of {@link SQLEngineService}.
 * <p>
 * Every operation delegates to the {@link SparkJdbcUtil} singleton. All methods are
 * best-effort: exceptions from the JDBC layer are logged and swallowed, and a neutral
 * value (empty list, {@code false}, {@code 0}) is returned instead.
 */
public class SparkSQLEngineService implements SQLEngineService {

    private static final Logger logger = LoggerFactory.getLogger(SparkSQLEngineService.class);

    /**
     * Executes a SELECT statement and returns each row as a column-name → value map.
     *
     * @param sql the query to run
     * @return the result rows; never {@code null} — an empty list on failure or empty result
     * @since v1.1
     */
    @Override
    public List<Map<String, Object>> executeQuery(String sql) {
        List<Map<String, Object>> list = null;
        try {
            logger.info("执行 SparkJdbcUtil.getInstance().querySqlForKeyValue; sql:{}", sql);
            list = SparkJdbcUtil.getInstance().querySqlForKeyValue(sql);
        } catch (Exception e) {
            // Best-effort: log and fall through to the empty-list return below.
            logger.error("执行 SparkJdbcUtil.getInstance().querySqlForKeyValue 失败; sql:" + sql, e);
        }
        if (list == null) {
            list = new ArrayList<>();
        }
        return list;
    }

    /**
     * Drops the given table.
     * <p>
     * NOTE(review): {@code tableName} is concatenated directly into DDL — JDBC cannot
     * parameterize identifiers, so callers must ensure the name is trusted/validated.
     *
     * @param tableName the table to drop
     * @return {@code true} if the statement executed successfully
     * @since v1.1
     */
    @Override
    public boolean dropTable(String tableName) {
        String sql = "drop table " + tableName;
        return executeSQL(sql);
    }

    /**
     * Creates a table populated from the given SELECT statement (CTAS).
     * <p>
     * NOTE(review): {@code tableName} is concatenated directly into DDL — callers must
     * ensure the name is trusted/validated.
     *
     * @param tableName name of the table to create
     * @param sql       SELECT statement providing the table contents
     * @return {@code true} if the statement executed successfully
     * @since v1.1
     */
    @Override
    public boolean createTable(String tableName, String sql) {
        sql = "create table " + tableName + " as " + sql;
        return executeSQL(sql);
    }

    /**
     * Executes an arbitrary SQL statement (DDL/DML).
     *
     * @param sql the statement to execute
     * @return {@code true} on success, {@code false} if execution failed or threw
     */
    @Override
    public boolean executeSQL(String sql) {
        boolean result = false;
        try {
            logger.info("执行 SparkJdbcUtil.getInstance().excuteSql; sql:{}", sql);
            result = SparkJdbcUtil.getInstance().excuteSql(sql);
            logger.info("执行 SparkJdbcUtil.getInstance().excuteSql 成功; sql:{}", sql);
        } catch (Exception e) {
            logger.error("执行 SparkJdbcUtil.getInstance().excuteSql 失败; sql:" + sql, e);
        }
        return result;
    }

    /**
     * Executes a query and returns each row as a positional {@code Object[]}.
     *
     * @param sql the query to run
     * @return the result rows; never {@code null} — an empty list on failure
     */
    @Override
    public List<Object[]> queryRows(String sql) {
        List<Object[]> result = null;
        try {
            logger.info("执行 SparkJdbcUtil.getInstance().queryRows; sql:{}", sql);
            result = SparkJdbcUtil.getInstance().queryRows(sql);
            logger.info("执行 SparkJdbcUtil.getInstance().queryRows 成功; sql:{}", sql);
        } catch (Exception e) {
            logger.error("执行 SparkJdbcUtil.getInstance().queryRows 失败; sql:" + sql, e);
        }
        if (null == result) {
            result = new ArrayList<>();
        }
        return result;
    }

    /**
     * Runs a count query and returns the resulting row count.
     *
     * @param sql the count query to run
     * @return the count, or {@code 0} if execution failed
     */
    @Override
    public long getCount(String sql) {
        long count = 0L;
        try {
            // Fixed: messages previously referenced PrestoJdbcUtil, but this calls SparkJdbcUtil.
            logger.info("执行 SparkJdbcUtil.getInstance().countQuery; sql:{}", sql);
            count = SparkJdbcUtil.getInstance().countQuery(sql);
        } catch (Exception e) {
            logger.error("执行 SparkJdbcUtil.getInstance().countQuery 失败; sql:" + sql, e);
        }
        return count;
    }

}
