/* eslint-disable no-await-in-loop */
require('dotenv').config();
const { CronJob } = require('cron');
const { Client } = require('pg');
const async = require('async');
const ax = require('axios');
const https = require('https');
const log = require('../utils/log');
const { group } = require('../utils/utils');
const config = require('../config/index');

// Upper bound on simultaneous HEAD probes per processData() run.
const MAX_CONCURRENCY = 50;
// Shard identity of this container and the region label written to the DB.
const { idx: containerIdx, region } = config;

/**
 * Cron-driven URL health checker.
 *
 * On every cron tick it HEAD-probes each URL in `this.data` (populated by
 * {@link Detection#refreshData}) with bounded concurrency and upserts the
 * result into the "work_order_monit" table.
 */
class Detection {
  /**
   * @param {string} cron - cron expression controlling how often
   *   `processData()` fires.
   */
  constructor(cron) {
    this.data = [];
    this.isProcessing = false; // guards against overlapping cron ticks

    const job = new CronJob(
      cron,
      (() => {
        this.processData();
      }),
    );
    job.start();

    // Monitored hosts frequently use self-signed certificates, so TLS
    // verification is deliberately disabled for these probes.
    this.axios = ax.create({
      httpsAgent: new https.Agent({
        rejectUnauthorized: false,
      }),
      timeout: 15_000,
    });
  }

  /**
   * Probes every URL in `this.data` (at most MAX_CONCURRENCY in flight)
   * and persists the outcomes. No-ops when there is nothing to probe or
   * when the previous tick is still running.
   */
  async processData() {
    const logPrefix = `${containerIdx} -- processData`;
    if (!Array.isArray(this.data) || this.data.length === 0) return;
    if (this.isProcessing) {
      log.warn(logPrefix, 'previous run still in progress, skipping tick');
      return;
    }
    this.isProcessing = true;

    // Probe one work order; never throws — errors are folded into the result.
    const probe = async (item) => {
      const res = {
        work_order_id: item.work_order_id,
        incident_url: item.incident_url,
        status: 'DOWN',
        status_code: 500,
        error: null,
      };
      try {
        const response = await this.axios.head(item.incident_url, {
          // 2xx/3xx count as alive; 4xx/5xx reject into the catch below.
          validateStatus(status) {
            return status >= 200 && status < 400;
          },
        });
        res.status_code = response.status;
        res.status = 'UP';
      } catch (e) {
        if (e.response) {
          res.status_code = e.response.status;
        } else if (e.code === 'ECONNABORTED' || e.toString().includes('timeout')) {
          // Axios flags its own timeouts with ECONNABORTED; report them
          // as 504 (gateway timeout).
          res.status_code = 504;
        }
        res.error = e.toString();
        log.warn(logPrefix, 'callback', e.toString(), '--', item.incident_url);
      }
      return res;
    };

    try {
      // async v3 returns a promise when no final callback is supplied,
      // so the run can be awaited and its failures handled here.
      const results = await async.mapLimit(this.data, MAX_CONCURRENCY, probe);
      // BUGFIX: was `i.status === 200` — `status` holds 'UP'/'DOWN', not an
      // HTTP code, so the logged "up" count was always 0.
      log.info(logPrefix, '==end', results.filter((i) => i.status === 'UP').length);
      await Detection.saveResults(results);
    } catch (err) {
      log.error(logPrefix, err.toString());
    } finally {
      this.isProcessing = false;
    }
  }

  /**
   * Upserts probe results into "work_order_monit".
   *
   * BUGFIX: previously built SQL by string interpolation, which broke on any
   * single quote in a URL or axios error message (and was SQL-injectable),
   * and inserted the literal string 'null' when `error` was null. Now uses a
   * parameterized query; null stays NULL on both insert and update paths.
   *
   * @param {Array<object>} data - results produced by processData().
   */
  static async saveResults(data) {
    const logPrefix = `${containerIdx} -- saveResults`;
    const client = new Client();
    const sql = `INSERT INTO "work_order_monit" as wom ( "work_order_id", "incident_url", "status", "region", "max_duration_times", "last_monit_status_code", "last_monit_error_message" )
      VALUES
        ( $1, $2, $3, $4, 1, $5, $6 ) ON CONFLICT ( work_order_id ) DO
        UPDATE SET "incident_url" = $2, "status" = $3, "max_duration_times" = case when wom.status = $3 then wom."max_duration_times" + 1 else 1 end, "last_monit_status_code" = $5, "last_monit_error_message" = $6, "last_monit_at" = now();`;

    try {
      await client.connect();
      for (const item of data) {
        await client.query(sql, [
          item.work_order_id,
          item.incident_url,
          item.status,
          region,
          item.status_code,
          item.error,
        ]);
      }
      log.info(logPrefix, 'saved heartbeat success');
    } catch (e) {
      log.error(logPrefix, e.toString());
    } finally {
      await client.end();
    }
  }

  /**
   * Reloads the set of active work orders this container is responsible for.
   * Sharding: each container owns work orders whose id's last decimal digit
   * (first char of the reversed id) is 2*idx or 2*idx+1.
   */
  async refreshData() {
    const logPrefix = `${containerIdx} -- refreshData`;
    const client = new Client();
    try {
      await client.connect();
      const sql = `
      SELECT
        work_order_id,
        incident_url
      FROM
        work_order
      WHERE
        substr(reverse(work_order_id::VARCHAR),1,1) in ('${2 * config.idx}','${2 * config.idx + 1}')
        and length(incident_url) > 0
        and status_id = 2
        and deleted_at is null
      ORDER BY
        work_order_id ASC;`;
      const result = await client.query(sql);
      // BUGFIX: assign unconditionally — an empty (but successful) result
      // previously left stale URLs being probed forever.
      this.data = result.rows;
    } catch (e) {
      log.error(logPrefix, e.toString());
    } finally {
      await client.end();
    }
  }
}

// Expose the Detection class so the scheduler entry point can instantiate it.
module.exports = Detection;
