package cn.ac.iie.di.ban.data.exchange.runner.slave.task.runTask;

import cn.ac.iie.di.ban.data.exchange.runner.commons.protocol.StatusEnum;
import cn.ac.iie.di.ban.data.exchange.runner.commons.protocol.task.order.TaskOrderRequest;
import cn.ac.iie.di.ban.data.exchange.runner.slave.QuerySlave;
import cn.ac.iie.di.ban.data.exchange.runner.slave.task.checkTask.CheckSQLAndPermissionTask;
import cn.ac.iie.di.commons.util.hdfs.HDFSUtil;
import cn.ac.iie.jdbc.DBrokerI.DBrokerDriver;
import cn.iie.jsqlparser.helper.JSQLParserHelper;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.sql.*;

/**
 * Created by Black-Laptop on 12/6/2017.
 */
public class QueryTaskThread extends Thread {

    private static final Logger LOG = LoggerFactory.getLogger(QueryTaskThread.class);

    /** Fully-qualified name of the JDBC driver loaded reflectively before connecting. */
    private static final String DRIVER_NAME = "cn.ac.iie.jdbc.DBrokerI.DBrokerDriver";

    /** Number of result rows written to each local spill file before rotating to the next one. */
    private static final int ROWS_PER_FILE = 500;

    // volatile so a stop request issued from another thread via stopThrad() is visible to run().
    private volatile boolean stopThread;

    private String sql;
    private String resultPath;
    private String dbHosts;
    private String dbUser;
    private String dbPassword;
    private String dbName;
    private String task_id;

    private String hdfsPath;
    private String srcPath;
    private String ftpIp;
    private String ftpUser;
    private String ftpPassword;
    private int ftpPort;

    /**
     * Builds a query task thread from an incoming task order.
     *
     * @param taskOrderRequest carries the HDFS result path and the task id
     * @param jsqlParserHelper supplies the (already parsed/validated) SQL string to execute
     * @param dbHosts          DBroker host list placed in the JDBC URL
     * @param dbUser           database user
     * @param dbPassword       database password
     * @param dbName           database name
     * @throws Exception if the SQL string cannot be obtained from the helper
     */
    public QueryTaskThread(TaskOrderRequest taskOrderRequest, JSQLParserHelper jsqlParserHelper, String dbHosts, String dbUser, String dbPassword, String dbName) throws Exception {
        this.sql = jsqlParserHelper.getSQLString();
        this.hdfsPath = taskOrderRequest.getResult_path();
        this.task_id = taskOrderRequest.getTask_id();
        this.dbHosts = dbHosts;
        this.dbUser = dbUser;
        this.dbPassword = dbPassword;
        this.dbName = dbName;
        // NOTE(review): hard-coded Windows scratch directory — should come from configuration.
        this.srcPath = "D:\\a\\c\\";
        stopThread = false;
    }

    /**
     * Executes the query, spills the rows to local files and pushes them to HDFS.
     * Reports QUERY_TASK_ERROR and returns on any failure; reports QUERY_TASK_SUCCESS
     * only when the whole pipeline completed (or the thread was stopped before starting).
     */
    @Override
    public void run() {
        if (!stopThread) {
            LOG.info("sql:{}  resultPath:{}  task_id:{}", sql, resultPath, task_id);
            // Deliberately NOT logging dbPassword: credentials must not appear in log files.
            LOG.info("dbHosts:{}  dbUser:{}  dbName:{}", dbHosts, dbUser, dbName);

            /**
             * 数据库连接操作 (database connection setup)
             */
            try {
                Class.forName(DRIVER_NAME);
            } catch (ClassNotFoundException e) {
                QuerySlave.callAfterFinished.finished(StatusEnum.QUERY_TASK_ERROR, ExceptionUtils.getFullStackTrace(e));
                LOG.error(ExceptionUtils.getFullStackTrace(e));
                // Bug fix: the original fell through and later reported SUCCESS as well.
                return;
            }

            DBrokerDriver driver = new DBrokerDriver();
            java.util.Properties info = new java.util.Properties();
            info.put("user", dbUser);
            info.put("password", dbPassword);
            String conStr = "jdbc.iie.DBroker://" + dbHosts + "/" + dbName + ";auth=noSasl";
            // try-with-resources closes both the Connection and the Statement
            // (the original leaked the Statement and hand-rolled Connection.close()).
            try (Connection conn = driver.connect(conStr, info);
                 Statement stmt = conn.createStatement()) {
                writeFile(stmt);
            } catch (Exception e) {
                QuerySlave.callAfterFinished.finished(StatusEnum.QUERY_TASK_ERROR, ExceptionUtils.getFullStackTrace(e));
                LOG.error(ExceptionUtils.getFullStackTrace(e));
                // Bug fix: do not also report SUCCESS after reporting an error.
                return;
            }
        }
        QuerySlave.callAfterFinished.finished(StatusEnum.QUERY_TASK_SUCCESS, sql);
    }

    /**
     * 从数据库中获取信息之后对信息进行写入文件操作 — streams the result set to local files
     * (ROWS_PER_FILE rows per file, named "&lt;task_id&gt;_&lt;n&gt;" under srcPath, one JSON
     * object per line) and uploads each completed file to hdfsPath, deleting the local copy.
     *
     * @param stmt open statement used to execute {@link #sql}
     * @throws Exception on query or I/O failure
     */
    private void writeFile(Statement stmt) throws Exception {
        LOG.info("writeFile");
        try (ResultSet res = stmt.executeQuery(sql)) {
            ResultSetMetaData metaData = res.getMetaData();
            int columnCount = metaData.getColumnCount();
            Object[] columns = new Object[columnCount];
            for (int i = 0; i < columns.length; i++) {
                columns[i] = metaData.getColumnName(i + 1);
            }
            // Map internal column names back to the display names from the permission check.
            columns = changColumns(columns);

            // Hoisted out of the row loop: the target directory only needs checking once.
            dirExists(new File(srcPath));

            int count = 0;
            int lastName = 0;
            while (res.next()) {
                int chunk = count / ROWS_PER_FILE;
                lastName = chunk;
                File srcFile = new File(srcPath + task_id + "_" + chunk);
                boolean isNewFile = fileExists(srcFile);
                // Bug fix: open in APPEND mode — the original re-opened the file in truncate
                // mode for every row, so each chunk file retained only its last row.
                try (PrintWriter pw = new PrintWriter(
                        new OutputStreamWriter(new FileOutputStream(srcFile, true), "utf-8"), true)) {
                    JSONObject object = new JSONObject();
                    for (int i = 1; i <= columnCount; i++) {
                        object.put((String) columns[i - 1], String.valueOf(res.getString(i)));
                    }
                    pw.println(object.toString());
                    pw.flush();
                }
                count++;
                // Crossing into a new chunk file means the previous chunk is complete: ship it.
                if (isNewFile && chunk - 1 >= 0) {
                    uploadAndDelete(chunk - 1);
                }
            }
            // Flush the final (possibly partial) chunk — but only if at least one row was
            // written; the original tried to upload a nonexistent file on empty results.
            if (count > 0) {
                uploadAndDelete(lastName);
            }
        }
    }

    /** Uploads the local chunk file with the given index to HDFS, then deletes the local copy. */
    private void uploadAndDelete(int chunkIndex) {
        String localFile = srcPath + task_id + "_" + chunkIndex;
        try {
            HDFSUtil.putFileFromLocal(localFile, hdfsPath);
            new File(localFile).delete();
        } catch (Exception ex) {
            LOG.error("upload the operator registration package to HDFS failed. Case:" + ex.getMessage(), ex);
        }
    }

    /**
     * Ensures the given path exists as a directory, creating it (and parents) if absent.
     * Logs a warning when a regular file already occupies the path.
     */
    private void dirExists(File file) {
        if (!file.exists()) {
            file.mkdirs();
        } else if (!file.isDirectory()) {
            LOG.warn("same name file exists: {}", file);
        }
    }

    /**
     * Creates the file if it does not already exist.
     *
     * @return true only when the file was freshly created by this call
     */
    private boolean fileExists(File file) {
        if (file.exists()) {
            return false;
        }
        try {
            return file.createNewFile();
        } catch (IOException ex) {
            // Replaced printStackTrace() with proper logging.
            LOG.error("failed to create local result file: " + file, ex);
            return false;
        }
    }

    /**
     * 将发送队列中的列名转换回去 — maps each source column name ("srccolumn") reported by
     * the database back to its display name ("showcolumn") recorded in the permission
     * check result ({@code CheckSQLAndPermissionTask.postResult}).
     *
     * @param columns column names as returned by the result-set metadata
     * @return the same array with matching entries replaced by their display names
     */
    private Object[] changColumns(Object[] columns) {
        JSONObject jsonPostResult = JSONObject.parseObject(CheckSQLAndPermissionTask.postResult);
        JSONObject jsonDataResult = (JSONObject) jsonPostResult.get("data");
        JSONArray array = jsonDataResult.getJSONArray("cols");

        for (int j = 0; j < columns.length; j++) {
            for (int i = 0; i < array.size(); i++) {
                JSONObject jsonColsResult = (JSONObject) array.get(i);
                if (columns[j].equals(jsonColsResult.get("srccolumn"))) {
                    columns[j] = jsonColsResult.get("showcolumn");
                }
            }
        }
        return columns;
    }

    /**
     * Requests cooperative shutdown before the query begins.
     * (Method name typo "stopThrad" kept for backward compatibility with existing callers.)
     */
    public void stopThrad() {
        this.stopThread = true;
    }
}
