import os
import re
import sys
import time
from abc import ABC, abstractmethod
from datetime import datetime

from postgresql_db import *


def get_last_position(position_file_dir, position_file_path):
    """
    Return the byte offset recorded by the previous parsing run.

    :param position_file_dir: directory that holds the position files;
        created (with parents) when the position file does not exist yet
    :param position_file_path: path of the position file for this log
    :return: the recorded offset as an int, or 0 when there is no
        position file or its content is empty/non-numeric
    """
    if os.path.exists(position_file_path):
        with open(position_file_path) as position_file:
            content = position_file.read().strip()
        # An empty or corrupt position file (e.g. interrupted write) must
        # not crash the parser — fall back to reading from the start.
        try:
            return int(content)
        except ValueError:
            return 0
    os.makedirs(position_file_dir, exist_ok=True)
    return 0


class LogsParserStrategy(ABC):
    """Abstract interface for log-parsing strategies.

    Concrete strategies implement :meth:`parse_logs` to read log files,
    extract structured records, and persist them.
    """

    @abstractmethod
    def parse_logs(self, paths, rule, name, database_rule):
        """Parse the log files at *paths*.

        :param paths: iterable of log file paths
        :param rule: parsing rule configuration
        :param name: task name (used in log messages)
        :param database_rule: persistence configuration for the database
        """


class RegexLineRuleParser(LogsParserStrategy):
    """Line-by-line log parser driven by a regex with named groups.

    Each matched line yields ``match.groupdict()``; matched records are
    bulk-inserted into the database. A per-file ``.position`` file records
    the byte offset already consumed, so repeated runs resume where the
    previous run stopped instead of re-parsing the whole file.
    """

    def __init__(self, regex_line_rule):
        # NOTE(review): stored but never used — parse_logs reads the
        # pattern from rule['regex_line_rule'] instead. Confirm which
        # source of the pattern is intended.
        self.rule_str = regex_line_rule

    def parse_logs(self, paths, rule, name, database_rule):
        """
        Parse each log file in *paths* with ``rule['regex_line_rule']``.

        :param paths: iterable of log file paths
        :param rule: mapping containing key 'regex_line_rule' — a regex
            whose named groups become the record's column values
        :param name: task name, used only in log messages
        :param database_rule: mapping with 'table_name' and 'columns',
            forwarded to batch_insert
        """
        # Compile once, outside the per-line loop: re.match(pattern_str, ...)
        # would re-do the pattern-cache lookup for every single line.
        pattern = re.compile(rule['regex_line_rule'])
        for item in paths:
            logger.info(f'{datetime.now().strftime("%Y-%m-%d %H:%M:%S")} {name} {item} start parse log')
            if not os.path.exists(item):
                logger.error(f'{item} 文件不存在')
                continue
            # Total size in bytes — the read loop stops once we reach it.
            log_file_size = os.path.getsize(item)
            # File name without extension, used in the position-file name.
            dir_str, ext = os.path.splitext(item)
            file_name = dir_str.split(os.sep)[-1]
            # Creation date (YYYY-MM-DD) distinguishes rotated files that
            # share a base name.
            file_create_time = os.path.getctime(item)
            file_date = time.strftime('%Y-%m-%d', time.localtime(file_create_time))
            # Position files live in a 'position' dir next to the executable.
            # NOTE(review): sys.executable is the Python interpreter unless
            # this runs as a frozen (PyInstaller-style) binary — confirm
            # this is the intended location.
            position_file_dir = os.path.join(os.path.dirname(os.path.abspath(sys.executable)), 'position')
            position_file_path = os.path.join(position_file_dir, file_name + '_' + file_date + '.position')
            # Offset already consumed by a previous run (0 on first run).
            last_position = get_last_position(position_file_dir, position_file_path)
            with open(item, 'r', encoding='utf-8') as log_file:
                log_file.seek(last_position)
                results = []
                while last_position < log_file_size:
                    line = log_file.readline()
                    if line.strip() == '':
                        logger.warning(f'{name} {item} {log_file.tell()} 为空')
                    else:
                        match = pattern.match(line)
                        if match:
                            results.append(match.groupdict())
                        else:
                            logger.error(f'{name} {item} 日志格式不匹配:\n {line}')
                    # Remember how far we have read — also the loop bound.
                    last_position = log_file.tell()
                logger.info(f'{name} {item} 读取位置: {last_position}')
                if results:
                    batch_insert(results, database_rule['table_name'], database_rule['columns'])
            # Persist the new offset so the next run resumes from here.
            with open(position_file_path, mode='w') as position_file:
                position_file.write(str(last_position))


class LogsParserFactory:
    """Factory mapping rule-type names to parser strategy classes."""

    _strategy_mapping = {
        'regex_line_rule': RegexLineRuleParser
    }

    @classmethod
    def get_parser(cls, rule_type):
        """
        Return the strategy class registered for *rule_type*.

        :param rule_type: key into the strategy mapping, e.g. 'regex_line_rule'
        :return: the strategy class (not an instance)
        :raises NotImplementedError: when *rule_type* is not registered
        """
        try:
            # Index (not .get): .get returns None for unknown keys, which
            # made the original except clause unreachable and let callers
            # receive None silently.
            return cls._strategy_mapping[rule_type]
        except KeyError:
            raise NotImplementedError(f'暂不支持该规则类型: {rule_type}') from None

# def parse_logs(paths, rule, name, database_rule):
#     """
#     解析日志内容
#     :param paths: 日志路径
#     :param rule: 解析规则
#     :param name: 任务名称
#     :param database_rule: 数据库保存规则
#     """
#     for item in paths:
#         logger.info(f'{datetime.now().strftime("%Y-%m-%d %H:%M:%S")} {name} {item} start parse log')
#         if not os.path.exists(item):
#             logger.error(f'{item} 文件不存在')
#         if next(iter(rule)) == 'regex_line_rule':
#             # 获取文件字节总大小
#             log_file_size = os.path.getsize(item)
#             # 获取文件名称(不带扩展名)
#             dir_str, ext = os.path.splitext(item)
#             file_name = dir_str.split(os.sep)[-1]
#             # 获取文件名称(带扩展名)
#             # file_name = os.path.basename(path)
#             # 获取日志创建时间
#             file_create_time = os.path.getctime(item)
#             # 格式化日志创建时间
#             file_date = time.strftime('%Y-%m-%d', time.localtime(file_create_time))
#             # 定义文件位置记录
#             position_file_dir = os.path.join(os.path.dirname(os.path.abspath(sys.executable)), 'position')
#             position_file_path = os.path.join(position_file_dir, file_name + '_' + file_date + '.position')
#             # 定义上次记录位置
#             last_position = get_last_position(position_file_dir, position_file_path)
#             # 解析日志
#             with open(item, 'r', encoding='utf-8') as log_file:
#                 log_file.seek(last_position)
#                 results = []
#                 while last_position < log_file_size:
#                     line = log_file.readline()
#                     if line.strip() == '':
#                         logger.warning(f'{name} {item} {log_file.tell()} 为空')
#                     else:
#                         match = re.match(rule['regex_line_rule'], line)
#                         if match:
#                             result = match.groupdict()
#                             results.append(result)
#                         else:
#                             logger.error(f'{name} {item} 日志格式不匹配:\n {line}')
#                     # 记录当前读取位置
#                     last_position = log_file.tell()
#                 logger.info(f'{name} {item} 读取位置: {last_position}')
#                 if len(results) > 0:
#                     batch_insert(results, database_rule['table_name'], database_rule['columns'])
#             # 将当前读取位置写入文件记录
#             with open(position_file_path, mode='w') as position_file:
#                 position_file.write(str(last_position))
