package com.ganqiang1983.dbdiff;

import java.io.File;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Lock;

import org.apache.log4j.Logger;

import com.ganqiang1983.dbdiff.ScheduleManager.JOB_TYPE;

/**
 * 消费者 从query中获取Job并且处理相关工作流程
 */
/**
 * Pipeline consumer: takes a {@link Job} from its input queue and executes one
 * stage of the compare workflow (DUMP, SORT or DIFF), then hands the job to
 * the next stage's queue.
 * <p>
 * DUMP and SORT workers are constructed with an input queue and the next
 * stage's queue; DIFF workers are constructed with a shared completion
 * counter and the lock guarding it.
 */
public class Worker implements Runnable
{
  private static Logger logger = Logger.getLogger(Worker.class);
  /** Queue this worker consumes jobs from. */
  private BlockingQueue<Job> pool;
  /** Queue of the next pipeline stage; set only for DUMP and SORT workers. */
  private BlockingQueue<Job> nextPool;
  /** Which pipeline stage this worker executes. */
  private JOB_TYPE type;
  /** Shared count of completed diff jobs; set only for DIFF workers. */
  private AtomicInteger count;
  /** Guards updates to {@link #count}; set only for DIFF workers. */
  private Lock lock;

  // for diff
  public Worker(JOB_TYPE type, BlockingQueue<Job> pool, AtomicInteger count,
      Lock lock)
  {
    this.pool = pool;
    this.type = type;
    this.count = count;
    this.lock = lock;
  }

  // for dump & sort
  public Worker(JOB_TYPE type, BlockingQueue<Job> pool,
      BlockingQueue<Job> nextPool)
  {
    this.nextPool = nextPool;
    this.pool = pool;
    this.type = type;
  }

  @Override
  public void run()
  {
    if (JOB_TYPE.DUMP.equals(type)) {
      logger.info("[Dump-Worker-Thread] start");
      try {
        doDump();
      } catch (InterruptedException e) {
        // Restore the interrupt flag before terminating the process.
        Thread.currentThread().interrupt();
        System.exit(1);
      }
      logger.info("[Dump-Worker-Thread] end");
    } else if (JOB_TYPE.SORT.equals(type)) {
      logger.info("[Sort-Worker-Thread] start");
      try {
        doSort();
      } catch (InterruptedException e) {
        // Restore the interrupt flag before terminating the process.
        Thread.currentThread().interrupt();
        System.exit(1);
      }
      logger.info("[Sort-Worker-Thread] end");
    } else if (JOB_TYPE.DIFF.equals(type)) {
      logger.info("[Compare-Worker-Thread] start");
      try {
        doDiff();
      } catch (InterruptedException e) {
        // Restore the interrupt flag before terminating the process.
        Thread.currentThread().interrupt();
        System.exit(1);
      }
      logger.info("[Compare-Worker-Thread] end");
    }
  }

  /**
   * step 1 : dump — take a job from the dump queue, dump the left and right
   * table groups into two files, record summary/status files, then enqueue
   * the job on the sort queue.
   *
   * @throws InterruptedException if any stage of the dump fails
   */
  private void doDump() throws InterruptedException
  {
    // 1. Take a job from the dump queue (D_QUEUE).
    Job job = null;
    try {
      job = pool.take();
    } catch (InterruptedException e) {
      logger.error(
          "[Dump-Worker-Thread] failed to get job from the dump queue", e);
      throw new InterruptedException(
          "[Dump-Worker-Thread] failed to get job from the dump queue : "
              + e.getMessage());
    }

    // 2. Create the compare directory and the left/right file names under it.
    String dirPath = FileMaker.mkTableDir(job.getPairId());
    String[] newStr = job.getPairId().split(DBDiffConfUtil.SPLIT_STR);
    String leftFileName = FileMaker.getLeftFileName(dirPath, newStr[0]);
    String rightFileName = FileMaker.getRightFileName(dirPath, newStr[1]);

    // 3. Build SQL from the pair's metadata, query the database, and write
    //    the left-side dump file.
    EventManager eventMgr = new EventManager();
    int leftcount = 0;
    try {
      // 3.2 Validate the data node from the configuration file.
      // NOTE(review): 'tnode' is not declared in this class as shown here —
      // confirm where it is defined before relying on this call.
      eventMgr.onInit(tnode, DBDiffConfUtil.notificationMgr);

      // Open the FileWriter/BufferedWriter for the left data source.
      DataObjectBatch dob = new DataObjectBatch();
      dob.setBatchInfo(leftFileName);
      eventMgr.onBegin(FileMaker.createEvent(EventType.BEGIN, dob));
      // Iterate every table in the left source group; all rows go into one file.
      Map<String, Table> lmap = job.getLeftMap();
      for (String left : lmap.keySet()) {
        String[] lt = left.split(DBDiffConfUtil.PRE_DATASOURCE);
        String lid = lt[0];
        Table table = lmap.get(left);
        // 3.2 Build the query support for this datasource.
        JdbcSupport leftJdbcSupport = new JdbcSupport(
            JobCollector.DSTYPE_MAP.get(lid),
            DBToolkitUtils
                .convertDbSourceNode2DsProperties(DBDiffConfUtil.LEFT_DS_NODE_MAP
                    .get(lid)), JobCollector.DS_MAP.get(lid).getConnection());

        // 3.3 Look up the cached metadata for this datasource id.
        DatabaseMetaData meta = JobCollector.METADATA_MAP.get(lid);
        // 3.4 Query page by page and append the rows to the left dump file.
        HashMap<String, String> map = FileMaker.writePageFile(eventMgr, meta, lid,
            table, job.getLeftCondition(), leftJdbcSupport, leftFileName, job.getPairId());
        boolean lflag = Boolean.valueOf(map.get("flag"));
        leftcount += Integer.valueOf(map.get("count"));
        if (lflag) {
          logger.info("[Dump-Worker-Thread] left table [" + lid + ":"
              + table.getSchemaName() + "." + table.getTableName() + "]"
              + " of left dump file [" + leftFileName + "] write success");
        } else {
          InterruptedException e = new InterruptedException(
              "[Dump-Worker-Thread] left table [" + lid + ":"
                  + table.getSchemaName() + "." + table.getTableName() + "]"
                  + " of left dump file [" + leftFileName + "] write error ");
          logger.error(
              "[Dump-Worker-Thread] left table [" + lid + ":"
                  + table.getSchemaName() + "." + table.getTableName() + "]"
                  + " of left dump file " + leftFileName + " write error", e);
          throw e;
        }
      }
    } catch (DBToolkitCheckedException e) {
      logger.error(
          "[Dump-Worker-Thread] Writing left dump files failure : "
              + e.getMessage(), e);
      throw new InterruptedException("[Dump-Worker-Thread] Writing left dump files failure : " + e.getMessage());
    } catch (SQLException e) {
      logger.error("[Dump-Worker-Thread] Cannot to create left connection: "
          + e.getMessage(), e);
      throw new InterruptedException("[Dump-Worker-Thread] Cannot to create left connection : " + e.getMessage());
    } finally {
      try {
        // Close the FileWriter/BufferedWriter for the left dump file.
        eventMgr.onCommit(FileMaker.createEvent(EventType.COMMIT, null));
      } catch (DBToolkitCheckedException e) {
        logger.error(
            "[Dump-Worker-Thread] Close left FileWriter failure : "
                + e.getMessage(), e);
        throw new InterruptedException("[Dump-Worker-Thread] Close left FileWriter failure : " + e.getMessage());
      }
    }

    // 4. Build SQL from the pair's metadata, query the database, and write
    //    the right-side dump file.
    int rightcount = 0;
    try {
      // Open the FileWriter/BufferedWriter for the right data source.
      DataObjectBatch dob = new DataObjectBatch();
      dob.setBatchInfo(rightFileName);
      eventMgr.onBegin(FileMaker.createEvent(EventType.BEGIN, dob));
      // Iterate every table in the right source group; all rows go into one file.
      Map<String, Table> rmap = job.getRightMap();
      for (String right : rmap.keySet()) {
        String[] rt = right.split(DBDiffConfUtil.PRE_DATASOURCE);
        String rid = rt[0];
        Table table = rmap.get(right);

        // 4.2 Build the query support for this datasource.
        JdbcSupport rightJdbcSupport = new JdbcSupport(
            JobCollector.DSTYPE_MAP.get(rid),
            DBToolkitUtils
                .convertDbSourceNode2DsProperties(DBDiffConfUtil.RIGHT_DS_NODE_MAP
                    .get(rid)), JobCollector.DS_MAP.get(rid).getConnection());

        // 4.3 Query page by page and append the rows to the right dump file.
        DatabaseMetaData meta = JobCollector.METADATA_MAP.get(rid);
        HashMap<String, String> map = FileMaker.writePageFile(eventMgr, meta, rid,
            table, job.getRightCondition(), rightJdbcSupport, rightFileName, job.getPairId());
        boolean rflag = Boolean.valueOf(map.get("flag"));
        rightcount += Integer.valueOf(map.get("count"));
        if (rflag) {
          // BUGFIX: these messages previously reported leftFileName for the
          // right dump file.
          logger.info("[Dump-Worker-Thread] right table [" + rid + ":"
              + table.getSchemaName() + "." + table.getTableName() + "]"
              + " of right dump file [" + rightFileName + "] write success");
        } else {
          InterruptedException e = new InterruptedException(
              "[Dump-Worker-Thread] right table [" + rid + ":"
                  + table.getSchemaName() + "." + table.getTableName() + "]"
                  + " of right dump file [" + rightFileName + "] write error ");
          logger.error(
              "[Dump-Worker-Thread] right table [" + rid + ":"
                  + table.getSchemaName() + "." + table.getTableName() + "]"
                  + " of right dump file [" + rightFileName + "] write error", e);
          throw e;
        }
      }
    } catch (DBToolkitCheckedException e) {
      logger.error(
          "[Dump-Worker-Thread] Writing right dumpfiles failure : "
              + e.getMessage(), e);
      throw new InterruptedException("[Dump-Worker-Thread] Writing right dumpfiles failure : " + e.getMessage());
    } catch (SQLException e) {
      logger.error("[Dump-Worker-Thread] Cannot to create right connection: "
          + e.getMessage(), e);
      throw new InterruptedException("[Dump-Worker-Thread] Cannot to create right connection : " + e.getMessage());
    } finally {
      // Close the FileWriter/BufferedWriter for the right dump file.
      try {
        eventMgr.onCommit(FileMaker.createEvent(EventType.COMMIT, null));
      } catch (DBToolkitCheckedException e) {
        logger.error(
            "[Dump-Worker-Thread] Close right FileWriter failure : "
                + e.getMessage(), e);
        throw new InterruptedException("[Dump-Worker-Thread] Close right FileWriter failure : " + e.getMessage());
      }
    }

    // 5. Record the total row counts on the job.
    job.setLeftCount(leftcount);
    job.setRightCount(rightcount);

    // 6. Special case: when the folder name is a UUID (i.e. the real name was
    //    too long), record the pairId-to-UUID mapping so that:
    //    a. a later "proceed"-mode run can locate the folder automatically;
    //    b. the pair belonging to the UUID folder can be found.
    String folderName = dirPath.replaceAll(DBDiffConfUtil.DIFF_PATH + File.separator, "");
    if (folderName.indexOf(DBDiffConfUtil.SPLIT_STR) == -1) {
      // Record the folder alias on the job.
      job.setIsAlias(true);
      job.setAlias(folderName);
    } else {
      job.setIsAlias(false);
    }

    // 7. Write the summary-file information into the job file.
    boolean sflag = FileMaker.writeJobFileForSummary(job);
    if (sflag) {
      logger.info("[Dump-Worker-Thread] The dump summary file of pairId=[" + job.getPairId() + "] write successful.");
    } else {
      InterruptedException e = new InterruptedException(
          "[Dump-Worker-Thread] The dump summary file of pairId=[" + job.getPairId()
              + "] write error.");
      logger.error("[Dump-Worker-Thread] The dump summary file of pairId=[" + job.getPairId()
          + "] write error.", e);
      if (DBDiffConfUtil.notificationMgr != null) {
        Notification notification = new Notification(NotificationLevel.ERROR,
            null, Worker.class, new RuntimeException(),
            DBDiffConfUtil.PRE_MAIL_TITLE + "Dump failure",
            "The dump summary file of pairId=[" + job.getPairId() + "] write error.");
        DBDiffConfUtil.notificationMgr.notify(notification, false);
      }
      throw e;
    }

    // 8. Record the current status (dump finished) in the job file.
    boolean jobflag = FileMaker.writeJobFileForDump(job.getPairId());
    if (jobflag) {
      logger.info("[Dump-Worker-Thread] The dump job status file of pairId=["
          + job.getPairId() + "] write successful.");
    } else {
      InterruptedException e = new InterruptedException(
          "[Dump-Worker-Thread] The dump job status file of pairId=[" + job.getPairId()
              + "] write error.");
      logger.error(
          "[Dump-Worker-Thread] The dump job status file of pairId=[" + job.getPairId()
              + "] write error.", e);
      throw e;
    }

    // 9. Put the compare path into the sort queue (S_QUEUE).
    job.setType(JOB_TYPE.SORT);
    job.setPath(dirPath);
    // This worker acts as the producer for the next queue (S_QUEUE).
    nextPool.put(job);
    logger.info("[Dump-Worker-Thread] Add a pair of [" + job.getPairId()
        + "] to the sort queue ");
  }

  /**
   * step 2 : sort — take a job from the sort queue, sort its dump files,
   * optionally delete the raw dump files, record summary/status files, then
   * enqueue the job on the diff queue.
   *
   * @throws InterruptedException if any stage of the sort fails
   */
  private void doSort() throws InterruptedException
  {
    // 1. Take a job from the sort queue (S_QUEUE).
    Job job = null;
    try {
      job = pool.take();
    } catch (InterruptedException e) {
      logger.error(
          "[Sort-Worker-Thread] failed to get job from the sort queue", e);
      throw new InterruptedException(
          "[Sort-Worker-Thread] failed to get job from the sort queue : "
              + e.getMessage());
    }
    // 2. Sort the dump files.
    boolean sflag = FileMaker.sort(job.getPath(), job.getPairId());
    if (sflag) {
      logger.info("[Sort-Worker-Thread] Sort the dump files of parid=[" + job.getPairId()
          + "] successfully.");
    } else {
      InterruptedException e = new InterruptedException(
          "[Sort-Worker-Thread] Sort the dump files of parid=[" + job.getPairId() + "] failure.");
      logger.error("[Sort-Worker-Thread] Sort the dumps file of parid=[" + job.getPairId()
          + "] failure.", e);
      throw e;
    }
    // 3. If temp-file deletion is enabled, physically delete the two raw dump files.
    if (ScheduleManager.newInstance().IS_DELETE_TEMPFILE) {
      boolean flag = FileMaker.rmDumpFile(job.getPath());
      if (flag) {
        logger.info("[Sort-Worker-Thread] Delete the dump files of parid=[" + job.getPairId()
            + "] successfully.");
      } else {
        InterruptedException e = new InterruptedException(
            "[Sort-Worker-Thread] Delete the dump files of parid=[" + job.getPairId()
                + "] failure.");
        logger.error(
            "[Sort-Worker-Thread] Delete the dump files of parid=[" + job.getPairId()
                + "] failure", e);
        throw e;
      }
    }

    // 4. Write the summary-file information into the job file.
    boolean suflag = FileMaker.writeJobFileForSummary(job);
    if (suflag) {
      logger.info("[Sort-Worker-Thread] The sort summary file of parid=[" + job.getPairId()
          + "] write successful.");
    } else {
      InterruptedException e = new InterruptedException(
          "[Sort-Worker-Thread] The sort summary file of parid=[" + job.getPairId()
              + "] write failure ");
      logger.error("[Sort-Worker-Thread] The sort summary file of parid=[" + job.getPairId()
          + "] write failure", e);
      throw e;
    }

    // 5. Record the current status (sort finished) in the job file.
    boolean jobflag = FileMaker.writeJobFileForSort(job.getPairId());
    if (jobflag) {
      logger.info("[Sort-Worker-Thread] The sort job status file of parid=["
          + job.getPairId() + "] write successful.");
    } else {
      InterruptedException e = new InterruptedException(
          "[Sort-Worker-Thread] The sort job status file of parid=[" + job.getPairId()
              + "] write failure.");
      logger.error(
          "[Sort-Worker-Thread] The sort job status file of parid=[" + job.getPairId()
              + "] write failure.", e);
      throw e;
    }

    // 6. Put the job into the diff queue (C_QUEUE).
    job.setType(JOB_TYPE.DIFF);
    nextPool.put(job); // This worker acts as the producer for C_QUEUE.
    logger.info("[Sort-Worker-Thread] Add a pair of [" + job.getPairId()
        + "] to the diff queue");
  }

  /**
   * step 3 : diff — take a job from the diff queue, compare the sorted files,
   * optionally delete the .sorted files, record summary/status files, then
   * bump the shared completion counter.
   *
   * @throws InterruptedException if any stage of the diff fails
   */
  private void doDiff() throws InterruptedException
  {
    // 1. Take a job from the diff queue (C_QUEUE).
    Job job = null;
    try {
      job = pool.take();
    } catch (InterruptedException e) {
      logger
          .error(
              "[Compare-Worker-Thread] failed to get job from the compare queue",
              e);
      throw new InterruptedException(
          "[Compare-Worker-Thread] failed to get job from the compare queue : "
              + e.getMessage());
    }

    // 2. Compare the sorted files.
    boolean dflag = FileMaker.diff(job.getPath(), job.getPairId());
    if (dflag) {
      logger.info("[Compare-Worker-Thread] Compare the sort files of pairid=[" + job.getPairId()
          + "] successfully.");
    } else {
      InterruptedException e = new InterruptedException(
          "[Compare-Worker-Thread] Compare the sort files of pairid=["
              + job.getPairId() + "] failure");
      logger.error("[Compare-Worker-Thread] Compare the sort files of pairid=["
              + job.getPairId() + "] failure", e);
      throw e;
    }

    // 3. If temp-file deletion is enabled, physically delete the .sorted files.
    if (ScheduleManager.newInstance().IS_DELETE_TEMPFILE) {
      FileMaker.rmSortedFile(job.getPath());
    }

    // 4. Write the summary-file information into the job file.
    boolean suflag = FileMaker.writeJobFileForSummary(job);
    if (suflag) {
      logger.info("[Compare-Worker-Thread] The compare summary file of pairid=["
          + job.getPairId() + "] write successful.");
    } else {
      InterruptedException e = new InterruptedException(
          "[Compare-Worker-Thread] The compare summary file of pairid=[" + job.getPairId()
              + "] write failure");
      logger.error(
          "[Compare-Worker-Thread] The compare summary file of pairid=[" + job.getPairId()
              + "] write failure", e);
      throw e;
    }

    // 5. Record the current status (diff finished) in the job file.
    boolean jobflag = FileMaker.writeJobFileForDiff(job.getPairId());
    if (jobflag) {
      logger.info("[Compare-Worker-Thread] The compare job status file of pairid=["
          + job.getPairId() + "] write successful.");
    } else {
      InterruptedException e = new InterruptedException(
          "[Compare-Worker-Thread] The compare job status file of pairid=[" + job.getPairId()
              + "] write failure");
      logger.error(
          "[Compare-Worker-Thread] The compare job status file of pairid=[" + job.getPairId()
              + "] write failure", e);
      throw e;
    }

    // 6. Increment the shared completed-job counter.
    // BUGFIX: unlock() is now in a finally block so the lock cannot leak if
    // the update throws.
    lock.lock();
    try {
      count.getAndIncrement();
    } finally {
      lock.unlock();
    }

    logger.info("[Compare-Worker-Thread] The pair of [" + job.getPairId()
        + "] diff completed");
  }
}
