const MainWorker = require("./worker/main");
const ProgressBar = require("./process");
const readFileLine = require("./readFileLine");
const fs = require("fs");
const path = require("path");
/**
 * Print a human-readable summary of the user's configuration before the
 * import starts, so the user can abort during the countdown if it is wrong.
 *
 * @param {object}   o
 * @param {string}   o.csvDirPath   CSV data directory (relative to cwd)
 * @param {string}   o.modelDirPath table (model) definition directory
 * @param {string}   o.logDirPath   error-log directory
 * @param {number}   o.workerNum    number of worker threads
 * @param {number}   o.batchNum     rows per bulk upload
 * @param {string[]} o.tbNames      tables that will be imported
 */
function description({
  csvDirPath,
  modelDirPath,
  logDirPath,
  workerNum,
  batchNum,
  tbNames,
}) {
  const cwd = process.cwd();
  const summaryLines = [
    // CSV data directory
    `导入数据目录为：${path.join(cwd, csvDirPath)}`,
    // table definition directory
    `数据表定义目录为：${path.join(cwd, modelDirPath)}`,
    // error-log directory
    `错误日志查看目录为：${path.join(cwd, logDirPath)}`,
    // worker-thread count
    `子线程数目为：${workerNum} 个`,
    // batch size per upload
    `每次批量上传的数据量为：每次 ${batchNum} 条`,
    // tables to upload
    `要上传的表名为：${tbNames.join(",")}`,
  ];
  for (const line of summaryLines) {
    console.log(line);
  }
}
/**
 * Countdown printed to the console, giving the user a window to cancel
 * (Ctrl+C) before the import actually starts. Logs i, i-1, …, 0, one value
 * per `delayMs` tick.
 *
 * Generalized: the tick length used to be a hard-coded 1000 ms; it is now the
 * optional `delayMs` parameter, defaulting to the old value so existing
 * callers are unaffected.
 *
 * @param {number} [i=3]          starting count
 * @param {number} [delayMs=1000] pause between ticks, in milliseconds
 * @returns {Promise<void>} resolves ~`(i + 1) * delayMs` ms after the call
 */
function stepCount(i = 3, delayMs = 1000) {
  console.log(i);
  return new Promise((resolve) => {
    setTimeout(() => {
      if (i > 0) {
        // Recurse until 0 has been shown, chaining the resolution upward.
        stepCount(i - 1, delayMs).then(resolve);
      } else resolve();
    }, delayMs);
  });
}

/**
 * Entry point: bulk-import CSV files into database tables via a pool of
 * worker threads. Prints the configuration, counts down to let the user
 * cancel, initializes the database through MainWorker, then imports each
 * table's CSV file(s) sequentially.
 *
 * @param {object}   [opts]
 * @param {string}   opts.csvDirPath            CSV data directory (relative to cwd)
 * @param {string}   opts.modelDirPath          table (model) definition directory
 * @param {string}   [opts.logDirPath="./log"]  error-log directory
 * @param {object}   opts.seqConfig             connection config forwarded to MainWorker.initSequlize
 * @param {number}   [opts.workerNum=6]         number of worker threads
 * @param {number}   [opts.batchNum=300]        rows per bulk upload
 * @param {number}   [opts.startIndex=0]        first index of the CSV file list to import
 * @param {number}   [opts.endIndex]            end index (exclusive) of the CSV file list
 * @param {Function} opts.headHandler           (tbName, headerLine|null) -> thead
 * @param {Function} opts.rowHandler            (thead, tbName, rawLine, rowIndex) -> rowData
 * @param {boolean}  [opts.hasThRow=true]       whether each CSV has a header row
 * @param {string}   [opts.csvExtension=".csv"] file extension of the data files
 * @param {string[]} [opts.tbNames=[]]          explicit list of tables to import
 * @param {Object<string,string[]>} [opts.tbMaps] table name -> list of CSV base names
 * @param {Function} [opts.filter]              row filter; rows it rejects are not uploaded
 */
async function syncsql({
  csvDirPath,
  modelDirPath,
  logDirPath = "./log",
  seqConfig,
  workerNum = 6,
  batchNum = 300,
  startIndex = 0,
  endIndex,
  headHandler,
  rowHandler,
  hasThRow = true,
  csvExtension = ".csv",
  tbNames = [],
  tbMaps,
  filter
} = {}) {
  let tbIndex = startIndex;
  let tbCount = endIndex;
  // When tbMaps is a plain object, its keys fully determine the table list.
  // NOTE(review): startIndex/endIndex are ignored in that case — confirm intended.
  if ( Object.prototype.toString.apply(tbMaps) === '[object Object]') {
    tbNames = Object.keys(tbMaps);
  } else if (!Array.isArray(tbNames) || tbNames.length === 0) {
    // No explicit table list: derive it from the CSV directory listing,
    // restricted to the [startIndex, endIndex) window.
    const csvList = fs.readdirSync(path.join(process.cwd(), csvDirPath));
    tbNames = csvList
    .map((fileName) => fileName.replace(csvExtension, ""))
    .slice(tbIndex, tbCount);
  }
  // Default mapping: each table reads exactly one CSV with the same base name.
  if (!tbMaps) {
    tbMaps = tbNames.reduce((p, tn) => {
      p[tn] = [tn];
      return p;
    },{})
  }
  // NOTE(review): this assignment is dead — tbCount is unconditionally
  // overwritten below (tbCount = tbNames.length) before its next use.
  if (!tbCount) tbCount = tbNames.length;
  if (Array.isArray(tbNames) && tbNames.length) {
    // Show the configuration, then give the user a few seconds to cancel.
    description({
      csvDirPath,
      modelDirPath,
      logDirPath,
      workerNum,
      batchNum,
      tbNames,
    });
    await stepCount();
    console.log("开始初始化数据库");
    const mainWorker = new MainWorker({
      workerNum,
      batchNum,
      logDirPath,
      tbNames,
    });
    await mainWorker.initSequlize({
      tbNames,
      csvExtension,
      csvDirPath,
      modelDirPath,
      seqConfig,
    });
    console.log("初始化数据库成功");
    // Iterate every table; each table may map to several CSV files, which
    // are processed strictly one after another.
    tbIndex = 0;
    tbCount = tbNames.length;
    while (tbIndex < tbCount) {
      const tbName = tbNames[tbIndex];
      const fileNames = (tbMaps[tbName] || []);
      let fileIndex = 0;
      while (fileIndex < fileNames.length) {
        const fileName = fileNames[fileIndex] + csvExtension;
        console.log("\t开始处理 " + fileName);
        await syncTable({
          dataFilePath: path.join(process.cwd(), csvDirPath, fileName),
          mainWorker,
          tbName,
          headHandler,
          rowHandler,
          batchNum,
          hasThRow,
          filter
        });
        // Wait until the worker pool has drained this file's batches
        // before starting the next file.
        await mainWorker.waitFinished();
        console.log("\n\t处理完毕 " + fileName);
        fileIndex++;
      }
      tbIndex++;
    }
    // All tables done: shut the worker pool down.
    await mainWorker.stopWork();
  }
}

/**
 * Import one CSV file into one table, choosing a loading strategy by file
 * size: files under 100 MB are read into memory in a single pass; anything
 * larger is streamed line by line.
 *
 * @param {object}   o
 * @param {object}   o.mainWorker   worker pool handle
 * @param {Function} o.headHandler  header-row handler
 * @param {number}   o.batchNum     rows per upload batch
 * @param {boolean}  o.hasThRow     whether the first line is a header row
 * @param {Function} o.rowHandler   data-row handler
 * @param {string}   o.tbName       target table name
 * @param {string}   o.dataFilePath absolute path of the CSV file
 * @param {Function} [o.filter]     optional row filter
 */
async function syncTable({
  mainWorker,
  headHandler,
  batchNum,
  hasThRow,
  rowHandler,
  tbName,
  dataFilePath,
  filter
}) {
  // Files at or above this size are streamed instead of slurped (100 MB).
  const MAX_IN_MEMORY_SIZE = 1024 * 1024 * 100;
  const { size } = fs.statSync(dataFilePath);
  const loader = size < MAX_IN_MEMORY_SIZE ? readyAllContent : readyInLine;
  await loader({
    mainWorker,
    headHandler,
    batchNum,
    hasThRow,
    rowHandler,
    tbName,
    dataFilePath,
    filter
  });
}

/**
 * Import a CSV file small enough to fit in memory: read it in one go, split
 * into lines, and hand batches of `batchNum` processed rows to the worker
 * pool.
 *
 * Fix: `filter` now receives the PROCESSED row (the value returned by
 * `rowHandler`) and the absolute row index, matching the contract already
 * used by `readyInLine`. Previously it received the raw CSV line and a
 * batch-local index, so the same `filter` behaved differently depending on
 * which code path handled the file.
 *
 * @param {object}   o
 * @param {object}   o.mainWorker   worker pool (registerTable / reciveWorks)
 * @param {Function} o.headHandler  (tbName, headerLine|null) -> thead
 * @param {number}   o.batchNum     rows per upload batch
 * @param {boolean}  o.hasThRow     whether the first line is a header row
 * @param {Function} o.rowHandler   (thead, tbName, rawLine, rowIndex) -> rowData
 * @param {string}   o.tbName       target table name
 * @param {string}   o.dataFilePath absolute path of the CSV file
 * @param {Function} [o.filter]     (tbName, rowData, rowIndex) -> keep row?
 */
async function readyAllContent({
  mainWorker,
  headHandler,
  batchNum,
  hasThRow,
  rowHandler,
  tbName,
  dataFilePath,
  filter
}) {
  // Progress bar 30 characters wide, measured in rows.
  const pbh = new ProgressBar(`导入 ${tbName} 表进度 单位行`, 30);
  console.log(`开始读取 ${tbName}`);
  const content = fs.readFileSync(dataFilePath).toString();
  console.log(`读取 ${tbName} 完毕`);
  const rows = content.split(/\r?\n/g);
  // Peel off the header row (if any) before batching the data rows.
  const thRow = hasThRow ? rows.splice(0, 1)[0] : null;
  const thead = headHandler(tbName, thRow);
  await mainWorker.registerTable(tbName);
  const total = rows.length;
  let base = 0; // absolute index of the first row in the current batch
  while (rows.length > 0) {
    const batch = rows.splice(0, batchNum);
    pbh.render({ completed: Math.min(base + batch.length, total), total });
    const rowList = [];
    batch.forEach((line, offset) => {
      const rowIndex = base + offset;
      const rowData = rowHandler(thead, tbName, line, rowIndex);
      // Same filter contract as readyInLine: processed row + absolute index.
      if (!filter || filter(tbName, rowData, rowIndex)) rowList.push(rowData);
    });
    if (rowList.length) await mainWorker.reciveWorks(rowList);
    base += batch.length;
  }
}
/**
 * Import a CSV file too large to hold in memory (>= 100 MB): stream it line
 * by line via readFileLine, accumulate processed rows, and flush them to the
 * worker pool in batches of `batchNum`. Progress is reported in bytes.
 *
 * @param {object}   o
 * @param {object}   o.mainWorker   worker pool (registerTable / reciveWorks)
 * @param {Function} o.headHandler  (tbName, headerLine) -> thead
 * @param {number}   o.batchNum     rows per upload batch
 * @param {boolean}  o.hasThRow     whether the first line is a header row
 * @param {Function} o.rowHandler   (thead, tbName, rawLine, lineIndex) -> rowData
 * @param {string}   o.tbName       target table name
 * @param {string}   o.dataFilePath absolute path of the CSV file
 * @param {Function} [o.filter]     (tbName, rowData, lineIndex) -> keep row?
 */
async function readyInLine({
  mainWorker,
  headHandler,
  batchNum,
  hasThRow,
  rowHandler,
  tbName,
  dataFilePath,
  filter
}) {
  let thead;
  console.log("文件 size 大于100Mb将进行逐行读取");
  // Progress bar 30 characters wide, measured in bytes.
  const pbh = new ProgressBar(`导入 ${tbName} 表进度 单位b`, 30);
  const rowList = [];
  const totalSize = fs.statSync(dataFilePath).size;
  let currentSize = 0;
  // 0 = idle, 1 = a __driveToWork drain is in progress (reentrancy guard).
  let workerStatus = 0;
  await mainWorker.registerTable(tbName);
  await readFileLine({
    filePath: dataFilePath,
    getRow: async (line, ri) => {
      if (hasThRow && ri === 0) {
        // First line is the header row: build thead instead of importing it.
        thead = headHandler(tbName, line);
      } else {
        // Byte-based progress; may undercount slightly (line terminators
        // are not included in Buffer.from(line).length).
        currentSize += Buffer.from(line).length;
        pbh.render({
          completed: Math.min(currentSize, totalSize),
          total: totalSize,
        });
        const row = rowHandler(thead, tbName, line, ri);
        if(!filter || filter(tbName,row,ri)) rowList.push(row);
        if (rowList.length >= batchNum) {
          // Flush whole batches to the worker pool.
          await __driveToWork();
        }
      }
    },
  });
  // Flush whatever is left after the last full batch.
  // NOTE(review): if a drain were still in flight here, this call would
  // return without flushing — assumes readFileLine awaits getRow so calls
  // never overlap; verify against readFileLine's implementation.
  await __driveToWork();
  pbh.render({
    completed: totalSize,
    total: totalSize,
  });
  // Drain rowList in batchNum-sized chunks; the guard prevents two drains
  // from splicing rowList concurrently.
  async function __driveToWork() {
    if (workerStatus === 1) return;
    workerStatus = 1;
    while (rowList.length) {
      await mainWorker.reciveWorks(rowList.splice(0, batchNum));
    }
    workerStatus = 0;
  }
}

// Public API: the CSV-to-database import pipeline entry point.
module.exports = syncsql;
