#!/usr/bin/env python3
# coding: utf-8
# Time: 2022-07-20 22:51:59
# Desc: Upload system logs to the log server; typically invoked after a test case fails.

import os
import re
import shutil
import signal
import sys
import tempfile
import time

g_common_topdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(g_common_topdir)
from lib.common import get_crash_path, command, command_quiet, command_output, get_tst_test_env_ip, get_host_ip, \
    get_os_release, get_kernel_release, get_system_boot_time, sysinfo


class Uploader:
    """Upload log files to remote log servers over one or more protocols.

    A protocol named ``<p>`` is supported when a callable method
    ``upload_by_<p>`` exists on this class; ``upload()`` dispatches
    dynamically so new protocols only need a new method.
    """

    def __init__(self, config):
        # config: dict with 'default' and 'protocols' sections
        # (see Collectd.config for the expected layout).
        self.config = config

    def is_support(self, protocol):
        """Return True if a callable ``upload_by_<protocol>`` method exists."""
        func = getattr(self, f'upload_by_{protocol}', None)
        return callable(func)

    def upload_by_rsync(self, logs):
        """
        Upload files via the rsync protocol.
        :param logs: path of the files to upload
        :return:
            0: at least one server accepted the upload
            1: every server failed
        """
        return_code = 1
        rsync = self.config['protocols']['rsync']
        for server in rsync['servers']:
            cmd = f"rsync --archive --compress --copy-links --verbose --password-file={rsync['password file']} " \
                  f"{logs} {rsync['user']}@{server}::{rsync['path']}"
            status = command(cmd, timeout=600)
            if status != 0:
                print(f'rsync fail: {cmd}')
                continue
            # As long as one server succeeds, do not report failure.
            return_code = 0
            print(f'rsync success: {cmd}')
            if rsync['upload once']:
                break
        return return_code

    def upload(self, logs):
        """
        Upload *logs* using every configured protocol.

        Fix: dispatch dynamically through ``upload_by_<protocol>`` instead of
        hard-coding rsync, so protocols added as new methods are picked up
        automatically. Unsupported protocol names are skipped, matching the
        old behavior for non-rsync entries.

        :param logs: path of the files to upload
        :raises FileNotFoundError: if *logs* does not exist
        :raises Exception: if every protocol failed to upload
        """
        if not os.path.exists(logs):
            raise FileNotFoundError(f'the logs {logs} not exist')
        is_upload_success = False
        for protocol in self.config['protocols']:
            if not self.is_support(protocol):
                continue
            status = getattr(self, f'upload_by_{protocol}')(logs)
            if status != 0:
                continue
            is_upload_success = True
            # Stop after the first successful protocol unless configured
            # to try them all.
            if not self.config['default']['upload all protocol']:
                return
        # Raise if every protocol failed to upload.
        if not is_upload_success:
            raise Exception('all protocol upload fail')


class Collectd:
    """
    Collect failure artifacts (kernel crash vmcores, coredumps, system info
    and user-supplied logs) into a timestamped directory tree and upload it
    to the log server via ``Uploader``.
    """

    def __init__(self, log_dir: str, log_name: str):
        """
        :param log_dir: top-level directory under which the per-host log tree is built
        :param log_name: suffix appended to the timestamped leaf directory name
        """
        self.log_topdir = log_dir
        self.log_name = log_name
        # Built-in configuration; no external config file is read.
        self.config = {
            "default": {
                "auto install tools": True,
                "upload all protocol": False,
                "detect item": [
                    "crash",
                    "coredump"
                ]
            },
            "protocols": {
                "rsync": {
                    "servers": [
                        "tst.tlinux.woa.com"
                    ],
                    "upload once": True,
                    "user": "testcase",
                    "password file": f"{g_common_topdir}/cmd/rsync.secret",
                    "path": "case_logs/"
                }
            }
        }
        # rsync requires the password file to be private to its owner.
        # NOTE(review): this raises if cmd/rsync.secret is missing — presumably
        # the file ships with the repo; confirm.
        os.chmod(self.config['protocols']['rsync']['password file'], mode=0o600)
        os.chown(self.config['protocols']['rsync']['password file'], uid=os.getuid(), gid=os.getgid())
        self.uploader = Uploader(self.config)
        self.crash_path = get_crash_path()
        # Maps ignore-list file path -> artifact paths to skip on the next run.
        self.ignore_dict = dict()

    def add_ignore_list_file(self, list_file: str, ignore_list: list) -> None:
        """Queue *ignore_list* entries to be persisted into *list_file* later."""
        if list_file not in self.ignore_dict:
            self.ignore_dict[list_file] = list()
        self.ignore_dict[list_file] += ignore_list

    def update_ignore_list_file(self) -> None:
        """
        Merge queued ignore entries with those already on disk, drop entries
        whose files no longer exist, and rewrite (or remove) each list file.
        """
        for list_file in self.ignore_dict:
            new_ignore_list = list()
            if os.path.exists(list_file):
                with open(list_file, 'r') as f:
                    self.ignore_dict[list_file] += f.read().splitlines()
            # If an ignored file no longer exists, there is no need to keep ignoring it.
            for ignore_file in set(self.ignore_dict[list_file]):
                if os.path.exists(ignore_file):
                    new_ignore_list.append(ignore_file)
            # Rewrite the ignore-list file with the surviving entries.
            if new_ignore_list:
                with open(list_file, 'w') as f:
                    for will_ignore in new_ignore_list:
                        f.write(f'{will_ignore}\n')
            elif os.path.isfile(list_file):
                # Every ignored file is gone, so delete the list file itself.
                os.remove(list_file)
            # After the files on disk are updated, drop the in-memory records.
        self.ignore_dict.clear()

    def detect_is_crash(self, logdir) -> bool:
        """
        The detect_is_xxxx family of functions checks whether the corresponding
        anomaly occurred; if so, (links to) its logs are placed under *logdir*.
        :param logdir: directory that will receive links to the vmcore directories
        :return: True if at least one non-ignored vmcore was found, else False
        """
        if self.crash_path is None:
            return False
        vmcore_ignore_list_file = os.path.join(self.crash_path, 'ignore-vmcore.txt')
        ignore_vmcore_list = list()
        vmcore_list = list()
        if os.path.isfile(vmcore_ignore_list_file):
            with open(vmcore_ignore_list_file, 'r') as f:
                ignore_vmcore_list = f.read().splitlines()

        # Walk the crash dump directory looking for files literally named 'vmcore'.
        for root, dirs, files in os.walk(self.crash_path):
            for file in files:
                if file != 'vmcore':
                    continue
                vmcore_file = os.path.join(root, file)
                if vmcore_file in ignore_vmcore_list:
                    continue
                vmcore_list.append(vmcore_file)
        if vmcore_list:
            os.makedirs(logdir, mode=0o755, exist_ok=True)
            for vmcore in vmcore_list:
                vmcore_dir = os.path.dirname(vmcore)
                # vmcore files are huge, so symlink their directory instead of copying.
                os.symlink(vmcore_dir, os.path.join(logdir, os.path.basename(vmcore_dir)))
            # Remember what is being uploaded; on upload success these files go
            # onto the ignore list so they are not re-uploaded next time.
            self.add_ignore_list_file(vmcore_ignore_list_file, vmcore_list)
            return True
        return False

    def detect_is_coredump(self, logdir) -> bool:
        """
        Check for new coredump files and symlink them under *logdir*.

        Handles two core_pattern styles: a literal filesystem path, or the
        systemd-coredump pipe handler (queried via coredumpctl).
        :param logdir: directory that will receive core-file links and metadata
        :return: True if at least one new (non-ignored) core file was found
        """
        with open('/proc/sys/kernel/core_pattern', 'r') as f:
            pattern = f.readline().strip()
        core_file_list = list()
        core_dir = None
        if pattern.startswith('/'):
            # core_pattern is a literal path: scan its directory for core files.
            core_dir = pattern
            if not pattern.endswith('/'):
                core_dir = os.path.dirname(pattern)
            if os.path.exists(core_dir):
                for core_file in os.listdir(core_dir):
                    core_path = os.path.join(core_dir, core_file)
                    # Use file(1) to identify ELF core files regardless of name.
                    outs, _, _ = command_output(f'file "{core_path}"')
                    if re.match(r'.*ELF.*\bcore file\b.*', outs) and os.path.isfile(core_path):
                        core_file_list.append(core_path)
        elif pattern.startswith('|/usr/lib/systemd/systemd-coredump'):
            # Managed by systemd: query core info through coredumpctl.
            # NOTE(review): some distros install the handler at
            # /lib/systemd/systemd-coredump, which this prefix check would miss — confirm.
            if command_quiet('coredumpctl list') != 0:
                return False
            outs, errs, return_code = command_output('coredumpctl info')
            os.makedirs(logdir, mode=0o755, exist_ok=True)
            # Keep the raw coredumpctl output alongside the cores for debugging.
            with open(os.path.join(logdir, 'core-info.txt'), mode='w') as f:
                f.write('stdout:\n')
                f.write(outs)
                f.write('stderr:\n')
                f.write(errs)
                f.write(f'return code: {return_code}\n')
            for line in outs.splitlines():
                # The 'Storage:' field of coredumpctl info holds the core file path.
                match = re.match(r'^\s*Storage:\s*(.*)', line.strip(), re.IGNORECASE)
                if match is None:
                    continue
                core_file = match.groups()[0]
                if not os.path.isfile(core_file):
                    continue
                core_dir = os.path.dirname(core_file)
                core_file_list.append(core_file)
        if core_file_list and core_dir and os.path.isdir(core_dir):
            os.makedirs(logdir, mode=0o755, exist_ok=True)
            need_upload_list = list()
            core_ignore_list_file = os.path.join(core_dir, 'ignore-coredump.txt')
            core_ignore_list = list()
            if os.path.isfile(core_ignore_list_file):
                with open(core_ignore_list_file, 'r') as f:
                    core_ignore_list = f.read().splitlines()
            # core-list.txt records every core seen; only new ones are linked/uploaded.
            with open(os.path.join(logdir, 'core-list.txt'), mode='w') as f:
                for core_file in core_file_list:
                    f.write(f'{core_file}\n')
                    if (not os.path.isfile(core_file)) or (core_file in core_ignore_list):
                        continue
                    need_upload_list.append(core_file)
                    os.symlink(core_file, os.path.join(logdir, os.path.basename(core_file)))
            if need_upload_list:
                self.add_ignore_list_file(core_ignore_list_file, need_upload_list)
                return True
        return False

    def collectd_upload(self) -> None:
        """
        Build this run's log directory, gather detected anomalies plus system
        info, upload via the configured protocols, then persist the ignore
        lists and print the resulting log URL.
        """
        tst_test_env_ip = get_tst_test_env_ip()
        host_ip = get_host_ip()
        os_release = get_os_release()
        kernel_release = get_kernel_release()
        boot_time = get_system_boot_time()

        # Timestamp plus sub-second digits keeps the leaf directory name unique.
        now_time = time.time()
        time_stamp = time.strftime('%Y%m%d-%H%M%S-', time.localtime(now_time)) + f'{now_time:.09f}'.split('.')[1]
        if tst_test_env_ip is None:
            log_rsync = os.path.join(self.log_topdir, host_ip)
            relative_path = os.path.join(host_ip, os_release, kernel_release, boot_time,
                                         time_stamp + f'.{self.log_name}')
            log_dir = os.path.join(self.log_topdir, relative_path)
        else:
            # NOTE(review): tst_test_env_ip appears to be an indexable pair of
            # IP-like components — confirm against get_tst_test_env_ip().
            log_rsync = os.path.join(self.log_topdir, tst_test_env_ip[0])
            relative_path = os.path.join(tst_test_env_ip[0], tst_test_env_ip[1], os_release, kernel_release,
                                         boot_time, time_stamp + f'.{self.log_name}')
            log_dir = os.path.join(self.log_topdir, relative_path)

        # Gather other anomaly logs, e.g. crash and coredump.
        for detector in self.config['default']['detect item']:
            func = getattr(self, f'detect_is_{detector}')
            func(os.path.join(log_dir, detector))
        # Collect system information.
        sysinfo(os.path.join(log_dir, 'sysinfo'), install_command=self.config['default']['auto install tools'])
        # Link in any files the user asked to upload.
        user_log_files = os.path.join(self.log_topdir, '..', 'log_files')
        if os.path.exists(user_log_files):
            os.symlink(user_log_files, os.path.join(log_dir, 'user_log_files'))

        print(f'all logs in {log_dir}')
        self.uploader.upload(log_rsync)
        # Only persist the ignore lists after upload() returned without raising.
        self.update_ignore_list_file()
        print(f'testcase log URL: http://tst.tlinux.woa.com/tst_logs/testcase_logs/{relative_path}')


def usage():
    """Print the command-line usage string to stdout."""
    script = sys.argv[0]
    print(f'Usage: {script} log_dir log_name')


if __name__ == '__main__':
    # Expect exactly two positional arguments: log_dir and log_name.
    if len(sys.argv) != 3:
        usage()
        exit(1)
    arg_log_dir, arg_log_name = sys.argv[1], sys.argv[2]
    # Both arguments must be non-empty, and log_dir must be an existing directory.
    if not arg_log_dir or not arg_log_name or not os.path.isdir(arg_log_dir):
        usage()
        exit(1)
    collector = Collectd(log_dir=os.path.realpath(arg_log_dir), log_name=arg_log_name)
    collector.collectd_upload()
    exit(0)
