const fs = require("fs");
const path = require("path");
const { parentPort } = require("worker_threads");
const { Sequelize, DataTypes } = require("sequelize");

// --- Worker-local state, shared across messages ---------------------------
let status = 0; // 0 = idle, 1 = currently writing a batch job
let logDirPath; // error-log directory, supplied with each "gotoWork" message
let works = []; // queue of rows still to be written for the current job
let tbMap; // table name -> Sequelize model, built by init()
let tbName; // table currently being written

// Dispatch messages coming from the parent thread.
parentPort.on("message", async ({ msgType, payload }) => {
  switch (msgType) {
    case "gotoWork": {
      logDirPath = payload.logDirPath;
      tbName = payload.tbName;
      if (status === 1) {
        // Mid-job already: bounce the rows back so the parent can reassign them.
        parentPort.postMessage({ msgType: "iambusy", payload: payload.data });
      } else {
        await onGotoWork(payload.data);
      }
      break;
    }
    case "init":
      await init(payload);
      break;
  }
});

/**
 * Initialize this worker: open a MySQL connection, build the
 * table-name -> Sequelize-model map for every whitelisted table that has
 * a matching CSV file, sync the schema, then tell the parent we loaded.
 *
 * @param {object}   cfg
 * @param {string[]} cfg.tbNames      tables this worker is allowed to write
 * @param {object}   cfg.seqConfig    MySQL connection settings
 * @param {string}   cfg.modelDirPath dir (relative to cwd) holding `<tbName>.js` model definitions
 * @param {string}   cfg.csvDirPath   dir (relative to cwd) holding the CSV files
 * @param {number}   cfg.threadId     worker id assigned by the parent
 * @param {string}   cfg.csvExtension extension stripped from CSV file names
 */
async function init({
  tbNames = [],
  seqConfig,
  modelDirPath,
  csvDirPath,
  threadId: tid,
  csvExtension,
}) {
  const sequelize = new Sequelize(
    seqConfig.database,
    seqConfig.username,
    seqConfig.password,
    {
      host: seqConfig.host,
      port: seqConfig.port,
      dialect: "mysql",
      logging: false,
    }
  );
  // Pre-seed the map so only whitelisted table names pick up a model below.
  tbMap = Object.fromEntries(tbNames.map((name) => [name, {}]));
  const csvFiles = fs.readdirSync(path.join(process.cwd(), csvDirPath));
  for (const fileName of csvFiles) {
    // NOTE(review): replace() strips the FIRST occurrence of csvExtension,
    // not necessarily a trailing one — assumes file names contain it once.
    const name = fileName.replace(csvExtension, "");
    if (tbMap[name]) {
      const defineFunc = require(path.join(
        process.cwd(),
        modelDirPath,
        `${name}.js`
      ));
      tbMap[name] = defineFunc(sequelize, DataTypes);
    }
  }
  await sequelize.sync();
  parentPort.postMessage({ msgType: "load", payload: 1 });
  // BUG FIX: `threadId = tid` was an implicit-global assignment (a
  // ReferenceError under strict mode). Keep the same observable effect —
  // a global — but make the intent explicit.
  globalThis.threadId = tid;
}

/**
 * Run one write job: mark the worker busy, drain `datas` into the
 * database batch by batch, then mark the worker idle again.
 * @param {object[]} datas rows to insert into the current table
 */
async function onGotoWork(datas) {
  status = 1;
  parentPort.postMessage({ msgType: "statusChange", payload: 1 });
  works = datas;
  // flushWork removes whatever rows it handled from `works`;
  // keep going until the queue is fully drained.
  while (works.length > 0) {
    await flushWork(works);
  }
  parentPort.postMessage({ msgType: "statusChange", payload: 0 });
  status = 0;
}

/**
 * Write one batch of rows; resolves once the batch (or its retried
 * halves) has been handled and removed from the `works` queue.
 * BUG FIX: the original wrapped an existing promise in `new Promise`
 * (explicit-construction anti-pattern); just forward it.
 * @param {object[]} datas rows to insert
 */
async function flushWork(datas) {
  return batchWriteToTb(datas);
}

/**
 * Insert `rowList` via bulkCreate. On success the rows are removed from
 * the shared `works` queue. On failure the batch is bisected and the
 * first half retried (the rest stays in `works` for the caller's loop);
 * a single failing row is reported to the parent ("errorCount"),
 * appended to the error log, and dropped so the queue keeps draining.
 * @param {object[]} rowList rows to insert (a prefix of `works`)
 */
async function batchWriteToTb(rowList) {
  const table = tbMap[tbName];
  if (!table) {
    // BUG FIX: the original `return`ed inside the promise executor without
    // resolving, so onGotoWork awaited forever and the worker hung as
    // "busy". Drop the rows instead so the drain loop can terminate.
    works.splice(0, rowList.length);
    return;
  }
  try {
    await table.bulkCreate(rowList);
    works.splice(0, rowList.length);
  } catch (e) {
    if (rowList.length > 1) {
      // Bisect on failure: retry only the first half now; the remainder
      // is still in `works` and will be retried by the caller's loop.
      // BUG FIX: `count` was an implicit global — declare it.
      const count = Math.max(1, Math.floor(rowList.length / 2));
      await flushWork(rowList.slice(0, count));
    } else {
      // A single row failed: report it, log it, and drop it.
      parentPort.postMessage({ msgType: "errorCount", payload: 1 });
      writeLog(e, rowList[0]);
      works.splice(0, 1);
    }
  }
}

/**
 * Append a failed row and its error details to `<logDir>/<tbName>.log`.
 * Throws if the log directory does not exist — the operator is expected
 * to create it up front.
 * @param {Error}  e   the error raised by the failed insert
 * @param {object} row the row that failed
 */
function writeLog(e, row) {
  const dir = path.join(process.cwd(), logDirPath);
  if (!fs.existsSync(dir)) {
    throw new Error(`请先创建错误日志目录 ${dir}`);
  }
  const logFile = path.join(dir, `${tbName}.log`);
  const entry = `\n${JSON.stringify(row)}\n${getErrMsg(e)}\n`;
  fs.appendFileSync(logFile, entry);
}

/**
 * Render a Sequelize (or plain) error into a readable multi-line string
 * for the error log.
 * BUG FIX: `parent.stack` / `original.stack` threw a TypeError whenever
 * the error had no `parent`/`original` (e.g. validation errors carry
 * neither), crashing the logging path itself — use optional chaining.
 * @param {Error} e error to format
 * @returns {string} formatted stack, field info, and SQL
 */
function getErrMsg(e) {
  const { errors, fields, parent, original, sql, stack } = e;
  return `${stack}
    errors:${JSON.stringify(errors)}
    fields:${JSON.stringify(fields)}
    parent:${parent?.stack}
    original:${original?.stack}
    sql:${sql}
  `;
}
