#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
 @Time    : 2018/5/20 21:29
 @File    : business_namespace.py
 @desc    :
'''
import time

from socketIO_client import BaseNamespace

from commonSpiders.creeper.manager.manager import MANAGER_KEY
from commonSpiders.creeper.net.client_namespace import service_excute
from commonSpiders.creeper.net.msg import CreeperSuccessMsg
from commonSpiders.creeper.service.crawler_task_service import CLIENT_CRAWLER_TASK_SERVICE_KEY
from commonSpiders.creeper.service.service import CLIENT_CRAWLER_PROCESS_MANAGER_SERVICE_KEY
from commonSpiders.creeper.utils.namespace_utils import get_extend_context


class CrawlerProcessNamespace(BaseNamespace):
    '''
    Socket.IO namespace that registers this client's crawler manager
    with the server.

    The exact same registration message is sent on both the initial
    connect and on every reconnect, so the shared logic lives in
    ``_register``.
    '''
    # Namespace path used when connecting to the Socket.IO server.
    KEY = '/crawler_process'

    def test(self):
        # NOTE(review): looks like a leftover stub — confirm nothing
        # dispatches to it before removing.
        pass

    def _register(self):
        '''
        Build the registration message for the local crawler manager
        (looked up from the extended context) and emit it on the
        'register' event.
        :return: None
        '''
        extend_context = get_extend_context(self)
        manager = extend_context.get(MANAGER_KEY, None)
        msg = service_excute(self, CLIENT_CRAWLER_PROCESS_MANAGER_SERVICE_KEY,
                             'register_crawler_manager', manager)
        self.emit('register', msg)

    def on_connect(self):
        '''
        Register with the server as soon as the connection is established.
        :return: None
        '''
        self._register()

    def on_reconnect(self):
        '''
        Re-register after a successful reconnect; the server state is
        assumed lost, so the same registration message is sent again.
        :return: None
        '''
        print('客户端注册')
        self._register()

    def on_disconnect(self):
        '''
        Log the dropped connection.
        :return: None
        '''
        print('断开连接')

    def on_register_success(self, data):
        '''
        Server acknowledged our registration; notify the local
        process-manager service.
        :param data: acknowledgement payload from the server (unused here)
        :return: None
        '''
        manager = get_extend_context(self).get(MANAGER_KEY, None)
        # Return value intentionally ignored — the service call is the effect.
        service_excute(self, CLIENT_CRAWLER_PROCESS_MANAGER_SERVICE_KEY,
                       'register_success', manager)

    def on_logout_success(self, data):
        '''
        Server acknowledged logout.
        :param data: acknowledgement payload from the server (unused here)
        :return: None
        '''
        print('登出成功')


class ClientSystemInfo(BaseNamespace):
    '''
    Socket.IO namespace intended to report client system metrics to the
    server. Only ``on_connect`` currently emits anything; the collector
    methods below are unimplemented placeholders.
    '''

    # Namespace path used when connecting to the Socket.IO server.
    KEY = '/client_system_info'

    def on_connect(self):
        # Emits a placeholder payload; real metric collection is not
        # wired up yet. NOTE(review): the event name
        # 'update_manager_system_cup' presumably means "cpu" — confirm
        # against the server-side handler before renaming.
        self.emit('update_manager_system_cup', {'fasd': 1})

    def get_cpu_info(self):
        '''
        Collect CPU usage information (not implemented).
        :return: None
        '''
        pass

    def get_meu_info(self):
        '''
        Collect memory usage information (not implemented).
        NOTE(review): "meu" presumably means "mem" — method name kept
        as-is since callers elsewhere may reference it.
        :return: None
        '''
        pass

    def get_hd_info(self):
        '''
        Collect disk usage information (not implemented).
        :return: None
        '''
        pass

    def get_net_info(self):
        '''
        Collect network usage information (not implemented).
        :return: None
        '''
        pass


class TaskNamespace(BaseNamespace):
    '''
    Socket.IO namespace for receiving crawler task assignments from the
    server and handing them to the local task service.
    '''

    # Namespace path used when connecting to the Socket.IO server.
    KEY = '/crawler_task'

    def on_connect(self):
        '''
        Announce that this client is ready to be assigned tasks.
        :return: None
        '''
        print('客户端请求分配任务')

    def on_start_task(self, data):
        '''
        Dispatch a new task from the server to the local task service.
        :param data: task payload forwarded by the server
        :return: None
        '''
        # Return value intentionally ignored — the service call is the effect.
        service_excute(self, CLIENT_CRAWLER_TASK_SERVICE_KEY, 'start_task', data)