#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
 @Time    : 2018/5/16 20:00
 @File    : task_service.py
 @desc    :
'''
from commonSpiders.entity.item_data_source import ImteDataSourceEntity
from commonSpiders.entity.item_info import ItemInfoEntity
from commonSpiders.entity.item_meta import ItemMetaEntity
from commonSpiders.entity.task_entity import TaskInfoEntity
from commonSpiders.global_context_manager.context_manager import ContextManager
from commonSpiders.net.extend_context import ContextExtend
from commonSpiders.net.message.socketio_msg import Success, Failure
from commonSpiders.scrapy_clusters_manager.config.constant import ManagerConstant
from commonSpiders.scrapy_clusters_manager.config.creeper_namespace_key import CreeperNamespaceKey
from commonSpiders.scrapy_clusters_manager.entity.context_entity import SocketIoContext
from commonSpiders.scrapy_clusters_manager.mysql.models import CrawlerTask, CrawlerGroupSettingsItem, ItemInfo, \
    ItemMeta, ItemDataSource
from commonSpiders.scrapy_clusters_manager.mysql.persistent import MysqlPerisistence
from commonSpiders.storage.redis.redis_client import RedisClient
from commonSpiders.utils.singleton import singleton




@singleton
@ContextExtend.extend('CrawlerTaskService', priority=1, async=False)
class CrawlerTaskService(object):
    '''
    爬虫任务服务
    '''

    def __init__(self):
        self.context = ContextManager()
        self.redis = RedisClient().get_redis(0)
        self.mysql_service = MysqlPerisistence().session

    def start_task(self, task_id):
        '''
        开启一个任务
        :param task_id:
        :return:
        '''
        # TODO wangkai 20180522 23:13 获取任务相关数据
        try:
            # 1 获取任务数据
            task = self.mysql_service.query(CrawlerTask).filter(CrawlerTask.id == task_id).first()

            # 2 获取爬虫管理器socketio
            settings_col = self.mysql_service.query(CrawlerGroupSettingsItem).filter(CrawlerGroupSettingsItem.task_id == task_id).all()
            settings_list = [item for item in settings_col]
            settings = {}
            for item in settings_list:
                settings.update({
                    item.key: item.value
                })

            # 3 发送数据
            item_list = []
            items = self.mysql_service.query(ItemInfo).filter(ItemInfo.task_id == task_id).all()
            for item in items:
                item_meta_list = []
                data_source = self.mysql_service.query(ItemDataSource).filter(ItemDataSource.id == item.item_datasource_id).first()
                data_source_entity = ImteDataSourceEntity(data_source.ip, data_source.port, data_source.username, data_source.password, data_source.path)
                item_metas = self.mysql_service.query(ItemMeta).filter(ItemMeta.item_id == item.id).all()
                for item_meta in item_metas:
                    item_meta_list.append(ItemMetaEntity(item_meta.dom_rule, item_meta.key, item_meta.parser_type).to_json())
                item_list.append(ItemInfoEntity(item.key, item.html_url_reg, item.parse_ruler, item_meta_list, data_source_entity.to_json()).to_json())

            task_entity = TaskInfoEntity(task.id, task.min_item_num, task.start_url, task.crawler_num, item_list, settings)

            socket_context = self.context.get(ManagerConstant.CRAWLER_MANAGER_KEY, SocketIoContext())
            socket_info = socket_context.get_manager_dict()
            if socket_info:
                msg = Success(msg='创建成功')
                data = Success(data={
                    'task_info': task_entity.to_json(),
                    'manager_guid': socket_info[0]
                })
                socket_info[1].emit('start_task', data, namespace=CreeperNamespaceKey.TASK_NAMESPACE_KEY)
            else:
                msg = Failure(error='没有爬虫进程管理器可调度，无法创建任务')

        except Exception as e:
            msg = Failure(error=e.message)
        return msg

    def stop_task(self, task_id):
        '''
        停止爬虫任务
        :param task_id:
        :return:
        '''
        pass

    def pause_task(self, task_id):
        '''
        暂停爬虫任务
        :param task_id:
        :return:
        '''
        pass

    def resume_task(self, task_id):
        '''
        恢复爬虫任务
        :param task_id:
        :return:
        '''
        pass