# -*- coding: utf-8 -*-

import requests, datetime, winsound, paramiko, toml, os, logging
from bs4 import BeautifulSoup
import PyQt5.QtCore as pqc
from concurrent.futures import ThreadPoolExecutor
from urllib.request import urlretrieve

# --- runtime configuration --------------------------------------------------
# All paths, server lists and feature switches come from this TOML file.
configpath = r'./cnf/log_config.toml'
config = toml.load(configpath)
# Servers to inspect: the "online" array of the first ServerList entry.
online_list = config['ServerList'][0]['online']
ins_log_path = config['ConfigInit']['inspect_log_path']  # dir for autocheck.log
ins_res_path = config['ConfigInit']['inspect_res_path']  # dir for the daily result .txt
pub_down_path = config['ConfigInit']['pub_down_path']    # dir for downloaded report .docx files
is_pubdown = config['ConfigInit']['pub_download']        # whether PUB events trigger report download
# --- logging: one named logger with both file and console handlers ----------
logger = logging.getLogger('LogInspection')
logger.setLevel(logging.INFO)
fmt = logging.Formatter(
    "%(asctime)s--%(name)s-%(threadName)s-%(levelname)s-%(module)s-%(funcName)s-%(message)s-%(lineno)d")
fh = logging.FileHandler(os.path.join(ins_log_path, "autocheck.log"), encoding='utf-8')
fh.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(fmt)
fh.setFormatter(fmt)
# NOTE(review): handlers are attached at import time; importing this module
# more than once would duplicate every log line — consider guarding with
# `if not logger.handlers:`.
logger.addHandler(ch)
logger.addHandler(fh)


class MagicDict(dict):
    """A dict that auto-creates nested MagicDicts for missing keys.

    Enables chained assignment such as ``d['ip']['check_name'] = value``
    without pre-creating the intermediate levels.
    """

    def __missing__(self, key):
        # dict.__getitem__ invokes __missing__ on a failed lookup; storing
        # the fresh child makes the auto-created node persistent.
        child = self[key] = type(self)()
        return child


class Check(pqc.QObject):
    """Inspection worker for one pass over one 21.104.14.x server.

    Each public ``*_check`` method probes a single aspect of the server
    (SSS web status, log files over HTTP, EEW logs and result files over
    SSH) and stores its findings in the nested ``self.res`` dict, keyed
    by full IP and then by check name.  Every anomaly is also emitted
    through ``err_signal`` so a GUI can show it live.
    """

    # Emitted with a human-readable description for every detected problem.
    err_signal = pqc.pyqtSignal(str)

    def __init__(self):
        super(Check, self).__init__()
        logger.info("establish new Check class")
        self.error_flag = False  # latched by logfile_check when a WARN entry is found
        self.port = 22           # SSH port used by the paramiko-based checks
        self.header = {
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36 Edg/114.0.1823.58',
        }
        # Credentials for the SSS web login form (fixed appliance account).
        self.postdata = {
            'user': 'root',
            'pass': 'rootme'
        }
        self.today = datetime.datetime.now().strftime("%Y%m%d")
        self.res = MagicDict()   # nested result store: res[full_ip][check_name]
        self.log_res = []        # log-file findings, accumulated across logfile_check calls
        self.today_dir()

    def _alarm_beep(self, duration):
        """Beep at 440 Hz for *duration* ms when the beep alarm is enabled."""
        if config['ConfigInit']['beep_alarm']:
            winsound.Beep(440, duration)

    def today_dir(self):
        """Ensure today's report download directory exists under pub_down_path.

        Replaces the original bare ``except`` around ``os.mkdir`` (which
        silently swallowed every error, including missing parents and
        permission problems) with ``makedirs(..., exist_ok=True)`` plus a
        logged warning for genuine failures.
        """
        target = os.path.join(pub_down_path, datetime.datetime.now().strftime("%Y%m%d"))
        try:
            os.makedirs(target, exist_ok=True)
        except OSError as e:
            logger.warning(f"could not create download dir {target}: {e}")

    def logfile_check(self, ip, logpath):
        """Fetch the tail of a server log via the JOPENSWeb viewer JSON API
        and record every entry whose level equals the alarm keyword (WARN).

        :param ip: last octet of the 21.104.14.x server address
        :param logpath: absolute path of the log file on the server
        """
        logger.info("starting")
        self.error_flag = False
        logurl = 'http://21.104.14.{}:8080/JOPENSWeb/mon/logViewer.json?'.format(ip)
        para = {
            'fileName': logpath,
            'start': -1,
            'loadNum': 1000,
            # Cache-buster: millisecond timestamp derived from the float epoch.
            't': str(datetime.datetime.timestamp(datetime.datetime.now())).replace('.', '')[:-3]
        }
        logger.info(f"visit sss web:{logurl}")
        resp = requests.get(logurl, params=para, headers=self.header)
        LogFileContent = resp.json()
        logger.info("sss web content analysis")
        err_keyword = 'WARN'
        logger.info(f"loglevel above {err_keyword} judge start")
        # Iterate the entries directly instead of indexing by range(len(...)).
        for entry in LogFileContent['logList']:
            if entry['logLevel'] == err_keyword:
                self.error_flag = True
                err_str = '21.104.14.{}---{}---Error detected:{} '.format(ip, logpath.split('/')[-1], entry)
                self.log_res.append(err_str)
                self.err_signal.emit(err_str)
        logger.info(f"loglevel above {err_keyword} judge end")
        if not self.error_flag:
            self.log_res.append('21.104.14.{}---{}---file checked. No Error!'.format(ip, logpath.split('/')[-1]))
        if self.error_flag:
            self._alarm_beep(1000)
        self.res['21.104.14.{}'.format(ip)]['logfile_res'] = self.log_res
        logger.info("end")

    def sssweb_check(self, ip):
        """Check station delay distribution and datastream users on the SSS web.

        :param ip: last octet of the 21.104.14.x server address
        """
        logger.info("starting")
        sss_res = {}
        login_url = 'http://21.104.14.{}:8080/jopens-sss/sss/login2'.format(ip)
        resp = requests.post(login_url, headers=self.header, data=self.postdata, allow_redirects=False)
        sss_cookies = resp.cookies
        logger.info(f"visit sss web:{login_url} and gain sign-in cookies:{sss_cookies}")
        r2 = requests.get('http://21.104.14.{}:8080/jopens-sss/config/sssStations.json?'.format(ip),
                          headers=self.header,
                          cookies=sss_cookies)
        logger.info("station number detail")
        # Delay buckets reported by sssStations.json, in response order.
        descrption_list = ['负延时', '0～1秒', '1～2秒', '2～2.5秒', '2.5～5秒', '5～10秒', '10～30秒', '30秒～60秒',
                           '1～5分钟', '5～15分钟',
                           '大于15分钟',
                           'N/A', '无数据']
        if r2.status_code == 200:
            summary = r2.json()['staSummary']
            sta_num = [len(summary[i]['list']) for i in range(len(descrption_list))]
            sss_res['station_count'] = list(zip(descrption_list, sta_num))
            logger.info("station number detail analysis")
            try:
                # Alarm when the 0~1s bucket holds less than half of all
                # stations with data (last three buckets excluded).
                if sta_num[1] / sum(sta_num[:-3]) < 0.50:
                    self._alarm_beep(500)
                    check_str = '21.104.14.{}---0~1s station number is too less(which is {})，please check!'.format(
                        ip, sta_num[1])
                    self.err_signal.emit(check_str)
                else:
                    check_str = '21.104.14.{}---station running situation is Normal.'.format(ip)
            except ZeroDivisionError as e:
                logger.warning(f"station number division wrongPlease Check station status!---{e}")
                check_str = '21.104.14.{}---!Please Check station status!.'.format(ip)
                self.err_signal.emit(check_str)
        else:
            self._alarm_beep(500)
            check_str = '21.104.14.{}---fail to connect sss server config web.'.format(ip)
            self.err_signal.emit(check_str)
            logger.warning("fail to connect sss server config web")
        sss_res['sta_res'] = check_str

        logger.info("sss datastream user status analysis")
        r3 = requests.get('http://21.104.14.{}:8080/jopens-sss/config/sssUsers'.format(ip), headers=self.header,
                          cookies=sss_cookies)
        if r3.status_code == 200:
            soup = BeautifulSoup(r3.text, 'html.parser')
            # Captions look like "...(xxx:N)" — take the count after the colon.
            no_cur_usr = soup.find('table', id='noList_table').find('caption').get_text().split(':')[1].strip(')')
            dat_cur_usr = soup.find('table', id='datList_table').find('caption').get_text().split(':')[1].strip(')')
            if no_cur_usr != '0':
                self._alarm_beep(500)
                err_str = '21.104.14.{}---There is no-data user,Please pay attention!'.format(ip)
                sss_res['usr_res'] = err_str
                self.err_signal.emit(err_str)
                logger.warning(f"21.104.14.{ip}---There is no-data user,Please pay attention!")
            else:
                sss_res['usr_res'] = '21.104.14.{}---{} users has data transporting,No Error happened.'.format(
                    ip, dat_cur_usr)

        self.res['21.104.14.{}'.format(ip)]['sss_res'] = sss_res
        logger.info("sss datastream user status analysis done")

    def mergeweb_check(self, ip):
        """List the merge (early-warning) results of the last 3 days, flag a
        server that produced none, and download reports of published events
        when pub_download is enabled.

        :param ip: last octet of the 21.104.14.x server address
        """
        logger.info("starting")
        merge_res = []
        merge_url = 'http://21.104.14.{}:8080/JOPENSWeb/cata/mapDB_eEWMergeOriginList_lite.json?'.format(ip)
        para = {
            'timeFlag': str(datetime.datetime.timestamp(datetime.datetime.now())).replace('.', '')[:-3],
            'simple': 'false',
            'sys_flag': 'W',
            'daySpan': '3',
            'bt': (datetime.datetime.now() - datetime.timedelta(days=3)).strftime("%Y%m%d"),
            'et': datetime.datetime.now().strftime("%Y%m%d")
        }
        r2 = requests.get(merge_url, params=para, headers=self.header)
        logger.info(f"merge web analysis start {merge_url}")
        # Parse once; the original re-invoked r2.json() on every loop access.
        merge_data = r2.json()
        if len(merge_data) == 0:
            err_str = '21.104.14.{}---Within 3 days there are NO merge-result,please pay attention!'.format(ip)
            merge_res.append(err_str)
            self.err_signal.emit(err_str)
            logger.warning(f"21.104.14.{ip}---Within 3 days there are NO merge-result,please pay attention!")
            self._alarm_beep(500)
        else:
            logger.info(f"merge web result are normal")
            merge_res.append(
                '21.104.14.{}---Within 3 days merge-result produce Normal.Detail are below list:'.format(ip))
            for evtid, records in merge_data.items():
                origin = records[0]['origin']
                locName = origin['locName']
                oritime = datetime.datetime.strptime(origin['oriTime'], '%Y%m%d%H%M%S%f')
                magnitude = origin['magnitude']
                # serial > 0 means the event result was actually published.
                issend = 'PUB' if records[0]['serial'] > 0 else 'UN_PUB'
                if issend == 'PUB' and is_pubdown:
                    self.merge_down(ip, evtid)
                merge_res.append('21.104.14.{}---{}--{}--{}--{}--{:.2f}'.format(ip, evtid, issend, oritime,
                                                                               locName, magnitude))
                logger.info("merge result {}--{}--{}--{}--{:.2f}".format(evtid, issend, oritime,
                                                                        locName, magnitude))
        self.res['21.104.14.{}'.format(ip)]['merge_res'] = merge_res
        logger.info("end")

    def sshres_check(self, full_ip, usr, pwd, dirpath):
        """Report the newest (non-backup) result file in *dirpath* over SSH,
        alarming when the server produced no file today.

        :param full_ip: full dotted IP address of the server
        :param usr: SSH user name
        :param pwd: SSH password
        :param dirpath: remote directory holding the result files
        """
        logger.info("starting")
        ssh_res = []
        s = paramiko.SSHClient()
        s.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        s.connect(full_ip, self.port, usr, pwd)
        logger.info(f"paramiko.SSHClient {full_ip} connected")
        try:
            today = datetime.datetime.now().strftime('%Y%m%d')
            stdin, stdout, sterr = s.exec_command('ls {}|grep {}'.format(dirpath, today))
            logger.info(f"SSHClient result file:{stdout} ")
            today_file = stdout.read().decode().splitlines()
            today_filter_file = [name for name in today_file if '_bak' not in name]
            logger.info("Newest File judge start")
            if today_filter_file:
                logger.info(f"SSHClient result today-file filter:{today_filter_file} ")
                ssh_res.append('{}---Newest File is: {}'.format(full_ip, today_filter_file[-1]))
            else:
                stdin, stdout, sterr = s.exec_command('ls {}'.format(dirpath))
                new_file = stdout.read().decode().splitlines()
                new_filter_file = [name for name in new_file if '_bak' not in name]
                # Guard against a completely empty result dir; the original
                # raised IndexError on new_filter_file[-1] here.
                newest = new_filter_file[-1] if new_filter_file else '<none>'
                check_str = '{}---Today has no result,Newest File is: {}'.format(full_ip, newest)
                logger.warning(check_str)
                ssh_res.append(check_str)
                self.err_signal.emit(check_str)
        finally:
            # The original leaked the SSH connection (never closed).
            s.close()
        self.res[full_ip]['ssh_res'] = ssh_res
        logger.info("end")

    def fjlog_check(self, full_ip, usr, pwd, logpath):
        """Tail the EEW log file over SSH and report lines containing ERROR.

        :param full_ip: full dotted IP address of the server
        :param usr: SSH user name
        :param pwd: SSH password
        :param logpath: absolute path of the remote log file
        """
        logger.info("starting")
        para_res = []
        s = paramiko.SSHClient()
        s.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        s.connect(full_ip, self.port, usr, pwd)
        logger.info(f"paramiko.SSHClient {full_ip} connected")
        try:
            stdin, stdout, sterr = s.exec_command('tail -200 {}'.format(logpath))
            eew_log = stdout.read().decode().splitlines()
        finally:
            # The original leaked the SSH connection (never closed).
            s.close()
        err_keyword = "ERROR"
        err_filter = [line for line in eew_log if err_keyword in line]
        logger.info(f"eew logfile above {err_keyword} judge start")
        if err_filter:
            err_str = '{}---Error detected:\n{}'.format(full_ip, err_filter)
            para_res.append(err_str)
            self.err_signal.emit(err_str)
        else:
            para_res.append('{}---logfile---OK'.format(full_ip))
        logger.info(f"eew logfile above {err_keyword} judge end")
        self.res[full_ip]['para_res'] = para_res
        logger.info("end")

    def merge_down(self, ip, evtid):
        """Download the two published report documents for event *evtid*
        into today's directory under pub_down_path.

        :param ip: last octet of the 21.104.14.x server address
        :param evtid: merge event id used by the download endpoint
        """
        self.today_dir()
        # (url, local file name, log tag) for the two report types.
        downloads = [
            (f'http://21.104.14.{ip}:8080/JOPENSWeb/cata/eewReportDownload?mergeID={evtid}&type=1',
             f'{evtid}_yujingbaogao.docx', 'yujingBaoGao'),
            (f'http://21.104.14.{ip}:8080/JOPENSWeb/cata/eewReportDownload?mergeID={evtid}&type=2',
             f'{evtid}_yujingronghebaogao.docx', 'yujingRongHeBaoGao'),
        ]
        for url, file_name, tag in downloads:
            logger.info(f"21.104.14.{ip}-{evtid}--{tag} down start")
            try:
                urlretrieve(url, os.path.join(pub_down_path, self.today, file_name))
            except Exception as e:
                logger.error(f"21.104.14.{ip}-{evtid}--{tag} down error:{e}")
                print(e)


class main_run(pqc.QThread):
    """Worker thread driving one complete inspection pass over every
    online server; forwards each Check error string via check_signal."""

    check_signal = pqc.pyqtSignal(str)  # relays every Check error string
    end_signal = pqc.pyqtSignal()       # fired once the whole pass is done

    def run(self):
        """Inspect all servers in parallel, then append the report tail."""
        logger.info("auto-check main process start")
        self.dfio = file_io()
        if config['ConfigInit']['beep_alarm']:
            winsound.Beep(800, 300)
        # submit + result() so worker exceptions get logged; the original
        # used executor.map and never consumed its iterator, silently
        # discarding any exception raised inside server_process.
        with ThreadPoolExecutor() as executor:
            futures = [executor.submit(self.server_process, srv) for srv in online_list]
        for fut in futures:
            try:
                fut.result()
            except Exception:
                logger.exception("server inspection raised")
        self.dfio.tail_write()
        if config['ConfigInit']['beep_alarm']:
            winsound.Beep(500, 300)
        logger.info("auto-check main process end")
        self.end_signal.emit()

    def server_process(self, server_i):
        """Run every check enabled in *server_i*'s config entry, then write
        the accumulated results to the report file.

        :param server_i: one server dict from the config's online list
        """
        check = Check()
        check.err_signal.connect(self.signal_handler)
        print('*' * 49)
        full_ip = server_i['ip']
        print(full_ip, '-' * 14, 'AutoCheck Start')
        # The HTTP checks address servers by the last octet only.
        solo_ip = str(full_ip).split('.')[-1]
        if server_i['sss_web_check']:
            check.sssweb_check(ip=solo_ip)
        if server_i['log_file_check']:
            for log_i in server_i['log_file_list']:
                check.logfile_check(ip=solo_ip, logpath=config['LogPath'][log_i + '_path'])
        if server_i['merge_web_check']:
            check.mergeweb_check(ip=solo_ip)
        if server_i['fj_log_check']:
            check.fjlog_check(full_ip=full_ip, usr=config[full_ip]['user'], pwd=config[full_ip]['pswd'],
                              logpath=server_i['fj_log_path'])
        if server_i['ssh_result_check']:
            check.sshres_check(full_ip=full_ip, usr=config[full_ip]['user'], pwd=config[full_ip]['pswd'],
                               dirpath=server_i['ssh_result_path'])

        self.dfio.res2txt(check.res)

    def signal_handler(self, err_string):
        """Relay a Check error string out through this thread's check_signal."""
        self.check_signal.emit(err_string)


class file_io():
    """Appends inspection results to a per-day text report in ins_res_path.

    The report file is named YYYYMMDD.txt; the header is written on
    construction and each server's results are appended as they finish.
    """

    def __init__(self):
        logger.info("Inspection output file header writing")
        self.filename = datetime.datetime.now().strftime("%Y%m%d") + '.txt'
        # 'a' instead of 'a+': the handle is only ever written here.
        with open(os.path.join(ins_res_path, self.filename), 'a', encoding='utf-8') as f:
            self.init_write(filecon=f)

    def res2txt(self, dic2w):
        """Append one server's nested result dict to the report file."""
        logger.info("Inspection output file result(dict) writing__all")
        with open(os.path.join(ins_res_path, self.filename), 'a', encoding='utf-8') as f:
            self.dict_write(dic2w, filecon=f)

    def dict_write(self, dic2w, filecon):
        """Recursively render *dic2w* into the open report file *filecon*.

        A separator row precedes each top-level server key; list values are
        written one item per line, strings verbatim, and nested dicts are
        recursed into.
        """
        logger.info("Inspection output file result(dict) writing__detail")
        for key, value in dic2w.items():
            if key.startswith('21.104.14'):
                filecon.write('*' * 72 + '\n')
            # isinstance instead of `type(...) is ...`; debug prints removed.
            if isinstance(value, list):
                filecon.write(f'---->{key}:\n')
                try:
                    text = '\n'.join(value) + '\n'
                except TypeError:
                    # List holds non-string items (e.g. station_count tuples);
                    # render each item via str() with a tab indent.
                    text = '\n'.join('\t' + str(x) for x in value) + '\n'
                filecon.write(text)
            elif isinstance(value, str):
                filecon.write(f'---->{key}:\n')
                filecon.write(f'{value}\n')
            else:
                filecon.write(f'---->{key}:\n')
                self.dict_write(value, filecon)

    def init_write(self, filecon):
        """Write the report header and a legend of all result keys."""
        logger.info("Inspection output file header__details")
        init_str = ['*' * 72, 'Maintenance And Inspection ToolPlatform For SCEA seismic early warning test system',
                    f"Inspection Date:{datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')}",
                    f'ConfigFile:{os.path.abspath(configpath)}',
                    '---->Interpretation:',
                    'logfile_res:system logfile inspection for instance jopens.log/sss.log/box.log/...',
                    'sss_res:sss-web inspection result ,which contains sation_count and usr_res',
                    '\tstation_count:the number of station datastream delay',
                    '\tusr_res:the number of normal user with valid data transport',
                    'para_res:EEW logfile inspection result with paramiko module',
                    'merge_res:early warning result for merge system',
                    'ssh_res:system production file_name with paramiko module']
        filecon.writelines('\n'.join(init_str))
        filecon.write('\n' * 2)

    def tail_write(self):
        """Append a per-server checkmark summary: √ if the server's IP
        appears anywhere in today's report, × otherwise."""
        logger.info("Inspection output file tail write done")
        ip_list = [item['ip'] for item in online_list]
        with open(os.path.join(ins_res_path, self.filename), 'r', encoding='utf-8') as f:
            content = f.read()
        with open(os.path.join(ins_res_path, self.filename), 'a', encoding='utf-8') as f:
            f.write('*' * 72 + '\n')
            for i in ip_list:
                if i in content:
                    f.write(i + ' check √ \n')
                else:
                    f.write(i + ' check × \n')


class ip_port_check():
    """TCP reachability probe for a host:port pair."""

    def conductivity(self, ip, port=8080):
        """Attempt a TCP connection to (*ip*, *port*) with a 1-second timeout.

        Returns 0 on success, otherwise the errno-style code produced by
        ``socket.connect_ex``.
        """
        import socket
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as probe:
            probe.settimeout(1)
            outcome = probe.connect_ex((ip, port))
        if outcome == 0:
            logger.info("ip_port connected success")
        else:
            logger.warning("ip_port connected failed")
        return outcome


if __name__ == '__main__':
    # Run a pass only when the primary server's web port answers.
    if ip_port_check().conductivity('21.104.14.3') == 0:
        # BUG FIX: the original built `main_run()` and immediately discarded
        # it, so QThread.run was never invoked and the script did nothing.
        # Without a Qt event loop here, execute the pass synchronously.
        runner = main_run()
        runner.run()
