import json
import shutil
from pathlib import Path

from model.model import Source, ParserGroup, CollectorTask
from common.collector_common.collector import CollectorTaskMap

from common.collector_common.receiver import LogstashMap
from package.connector.kafka_db import kafka_producer
from package.fastapi.job import scheduler_job
from fastapi import HTTPException


class SourceView(Source):
    """View-layer helpers for Source records.

    Keeps the on-disk Logstash pipeline configuration (one ``<id>.conf``
    per source, plus a shared ``output.conf``) in sync with the database,
    and manages the source <-> parser-group associations.
    """

    # Directory holding the generated Logstash pipeline *.conf files.
    BaseDir = '/home/chenst/worker/logstash/pipeline'

    @classmethod
    def ensure_output(cls):
        """Write (or overwrite) the shared Logstash output configuration."""
        # NOTE(review): assumes BaseDir already exists — confirm deployment
        # creates it, otherwise this raises FileNotFoundError.
        with open(f'{cls.BaseDir}/output.conf', 'w') as f:
            f.write(LogstashMap['tcp'].Output)

    @classmethod
    def _create_source_configure(cls, item):
        """Create the Logstash config file that ingests data for *item*.

        A disabled source gets the template's ``Init`` content so the
        pipeline file stays present and valid without ingesting anything.
        Returns the written content.
        """
        cls_ = LogstashMap[item.category]
        if item.enabled is False:
            content = cls_.Init
        else:
            content = cls_.render(category=item.category, port=item.port,
                                  id=item.id, configure=item.configure,
                                  host=item.host)

        with open(f'{cls.BaseDir}/{item.id}.conf', 'w') as f:
            f.write(content)
        return content

    @classmethod
    def _delete_source_configure(cls, item):
        """Remove the source's Logstash config file; no-op if absent."""
        path = (Path(cls.BaseDir) / f'{item.id}.conf').resolve()
        path.unlink(missing_ok=True)

    @classmethod
    def create_source(cls, session, body):
        """Create a source row and, for Logstash-backed categories, its config file."""
        item = cls.create_item(session, body)
        if LogstashMap.get(item.category):
            cls._create_source_configure(item)
        return item

    @classmethod
    def update_source(cls, session, filter_, update):
        """Update a source row and regenerate its Logstash config if applicable."""
        item = cls.update_item(session, filter_=filter_, update=update)
        if LogstashMap.get(item.category):
            cls._create_source_configure(item)
        return item

    @classmethod
    def delete_sources(cls, session, filter_):
        """Delete sources, removing their config files or scheduled collector jobs."""
        for item in cls.get_items(session, filter_=filter_):
            if item.category in LogstashMap:
                cls._delete_source_configure(item)
            else:
                # Non-Logstash sources are fed by scheduled collectors instead.
                for collector in item.collectors:
                    CollectorTaskView._delete_job(collector)
        return cls.delete_items(session, filter_)

    @classmethod
    def bind_source_parser_groups(cls, session, id, group_ids):
        """Bind parser groups to a source; returns how many were newly bound."""
        item = session.query(cls).filter(cls.id == id).first()

        # Skip groups that are already bound to avoid duplicate associations.
        gids = set(group_ids) - set(_group.id for _group in item.groups.all())
        if not gids:
            return 0

        groups = session.query(ParserGroup).filter(ParserGroup.id.in_(gids)).all()
        item.groups.extend(groups)
        session.commit()
        return len(groups)

    @classmethod
    def delete_source_parser_groups(cls, session, id, group_ids):
        """Unbind parser groups from a source; returns how many were removed.

        Groups in *group_ids* that are not currently bound are skipped —
        previously ``list.remove`` raised ValueError for them, turning an
        idempotent unbind request into a server error.
        """
        item = session.query(cls).filter(cls.id == id).first()
        groups = session.query(ParserGroup).filter(ParserGroup.id.in_(group_ids)).all()

        bound_ids = {_group.id for _group in item.groups.all()}
        count = 0
        for _group in groups:
            if _group.id in bound_ids:
                item.groups.remove(_group)
                count += 1

        session.commit()
        return count


# Import-time side effect: write the shared Logstash output config so the
# pipeline directory always has a valid output stage.
# NOTE(review): this performs file I/O at module import and will raise if
# SourceView.BaseDir does not exist — confirm deployment guarantees it.
SourceView.ensure_output()


class CollectorTaskView(CollectorTask):
    """Collector tasks: cron-scheduled jobs that pull data from a source
    and publish it to the parser Kafka queue."""

    # Namespace prefix for scheduler job ids belonging to collectors.
    CollectorPrefix = 'collect:collector:'
    # Kafka topic that receives collected messages awaiting parsing.
    Topic = 'todo_parser_queue'

    @classmethod
    def _job_id(cls, item):
        """Build the scheduler job id for a collector item (single source of truth)."""
        return f'{cls.CollectorPrefix}{item.category}_{item.id}'

    @classmethod
    def _add_job(cls, item):
        """Register (or re-register) the cron job that runs this collector."""
        scheduler_job.add_job(job_id=cls._job_id(item),
                              func=cls.execute,
                              cron=item.cron,
                              kwargs={'category': item.category,
                                      'configure': item.configure,
                                      'source_id': item.source_id,
                                      'as_list': item.as_list})

    @classmethod
    def _delete_job(cls, item):
        """Remove the collector's scheduler job."""
        scheduler_job.delete_job(cls._job_id(item))

    @classmethod
    def _send(cls, source_id, message):
        """Publish one message to the parser queue, tagged with its source id."""
        payload = json.dumps({'_sourceid': source_id, 'message': message},
                             ensure_ascii=False)
        kafka_producer.send(cls.Topic, payload)

    @classmethod
    def execute(cls, category, configure, as_list=False, verify=False, source_id=None):
        """Run one collection cycle: instantiate the collector, fetch, publish.

        ``verify=True`` returns the raw collected data without publishing
        (dry run).  With ``as_list=False`` a list result is split into one
        Kafka message per element; otherwise the whole result is sent as one
        message.  Raises for categories with no registered collector.
        """
        cls_ = CollectorTaskMap.get(category)
        if not cls_:
            raise Exception('该数据采集方式暂不支持')

        instance = cls_(**configure)
        data = instance.collector()
        if verify is True:  # verification only — skip publishing
            return data

        if isinstance(data, list) and as_list is False:  # split list into items
            for item in data:
                cls._send(source_id, item)
            return

        cls._send(source_id, data)

    @classmethod
    def create_collector(cls, session, body):
        """Create a collector row and schedule its job."""
        item = cls.create_item(session, body=body)
        cls._add_job(item)
        return item

    @classmethod
    def update_collector(cls, session, filter_, update):
        """Update a collector row and re-register its job with the new settings."""
        item = cls.update_item(session, filter_, update)
        cls._add_job(item)
        return item

    @classmethod
    def delete_collectors(cls, session, filter_):
        """Delete collectors, unscheduling each job first."""
        items = cls.get_items(session, filter_=filter_)
        for item in items:
            cls._delete_job(item)
        return cls.delete_items(session, filter_)

    @classmethod
    def collector_enable(cls, session, filter_, update):
        """Enable or disable a collector: schedule or remove its job accordingly."""
        item = cls.update_item(session, filter_, update)
        if item.enabled:
            cls._add_job(item)
        else:
            cls._delete_job(item)
        return item
