# coding:utf-8

import sys
import threading

sys.path.append('..')

import os
import signal
from thrift.transport import TSocket, TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
from thrift.transport.TTransport import TMemoryBuffer
from convert_processor import ConvertProccessor
from select_processor import SelectProcessor
from crawler_merge_proccessor import CrawlerMergeProccessor

from i_util.thread_pool import ThreadPool
from i_util.input_thread import InputThread
from i_util.pybeanstalk import PyBeanstalk
from i_util.logs import LogHandler
from i_util.heart_beat import HeartbeatThread
from bdp.i_crawler.i_crawler_merge import CrawlerMergeService
from bdp.i_crawler.i_downloader.ttypes import RetStatus
import MySQLdb, json, traceback, getopt, pytoml
from i_util.ProfilingUtil import profiler_creator
from i_util.ProfilingUtil import profiling_signal_handler


# from reload_dic import DicServer


class CrawlerMergeServer(object):
    """Wires the merge pipeline together: a worker pool running
    CrawlerMergeProccessor instances, an input thread feeding it from
    beanstalk, and a heartbeat reporter.
    """

    def __init__(self, conf, convert):
        self.conf = conf
        self.log = conf.get('log')
        self.convert = convert
        # Each pool thread gets its own processor instance and profiler.
        per_thread = {
            'processor': (CrawlerMergeProccessor, (conf.get('log'), self.convert)),
            'profiler': (profiler_creator, ()),
        }
        self.process_pool = ThreadPool(conf['server'].get('process_thread_num'), per_thread)
        self.input_thread = InputThread(conf.get('beanstalk_conf'), conf.get('log'), self.process_pool)
        self.heartbeat_thread = HeartbeatThread("crawler_merge", self.conf)

    def start(self):
        """Begin consuming input and reporting heartbeats."""
        self.input_thread.start()
        self.heartbeat_thread.start()
        self.log.info("start CrawlerMergeServer!")

    def stop(self, message):
        """Stop the input and heartbeat threads; log the shutdown reason."""
        self.input_thread.stop()
        self.heartbeat_thread.stop()
        self.log.info("stop CrawlerMergeServer {}!".format(message))


class CrawlerMergeHandler(object):
    def __init__(self, conf, convert, select_handler):
        self.conf = conf
        self.log = conf.get('log')
        self.convert = convert
        self.select_handler = select_handler
        self.beanstalk = PyBeanstalk(conf.get('beanstalk_conf').get('host'), conf.get('beanstalk_conf').get('port'))

    def to_string(self, link_info):
        str_entity = None
        try:
            tMemory_b = TMemoryBuffer()
            tBinaryProtocol_b = TBinaryProtocol.TBinaryProtocol(tMemory_b)
            link_info.write(tBinaryProtocol_b)
            str_entity = tMemory_b.getvalue()
        except EOFError, e:
            self.log.warning("can't write LinkAttr to string")
        return str_entity

    def merge(self, page_parseinfo):
        link_attr = self.convert.start_convert(page_parseinfo)
        try:
            if isinstance(self.conf.get('beanstalk_conf').get('output_tube'), list):
                for output_tube in self.conf.get('beanstalk_conf').get('output_tube'):
                    self.beanstalk.put(output_tube, self.to_string(link_attr))
            else:
                self.beanstalk.put(self.conf.get('beanstalk_conf').get('output_tube'), self.to_string(link_attr))
        except Exception, e:
            self.log.error("put msg from:{0}\tresult:{1}".format(self.conf.get('beanstalk_conf').get('output_tube'), str(e)))
        return link_attr

    # 每5条发送数据到队列
    def back_work(self, site, url_format, limit, start, extra_filter, tube_name):
        self.log.info(extra_filter)
        mod = 5
        start += 1
        limit -= 1
        times = limit / mod
        if limit % mod != 0:
            results = self.select_handler.select_webpage(site, url_format, limit % mod, start,extra_filter)
            for result in results:
                self.beanstalk.put(tube_name, self.to_string(result))
        start += limit % mod
        for i in xrange(times):
            results = self.select_handler.select_webpage(site, url_format, mod, start + i * mod,extra_filter)
            for result in results:
                self.beanstalk.put(tube_name, self.to_string(result))
            if len(results) != mod:
                break

    def select(self, site, url_format, limit, start=0, extra_filter='{}',tube_name='download_rsp'):
        ret = RetStatus()
        first_result = self.select_handler.select_webpage(site, url_format, 1, start,extra_filter)
        if not first_result:
            ret.status = 0
            return ret
        else:
            try:
                self.beanstalk.put(tube_name, self.to_string(first_result[0]))
                ret.status = 1
                if limit > 1:
                    # 创建守护进程,完成发送剩下数据
                    back_t = threading.Thread(target=self.back_work, args=(site, url_format, limit, start, extra_filter,tube_name))
                    back_t.setDaemon(True)
                    back_t.start()
                return ret
            except Exception, e:
                self.log.error("put msg from:{0}\tresult:{1}".format(tube_name, str(e)))
                ret.status = 0
                ret.errormessage = e.message
                return ret

    def select_one(self, url):
        result = self.select_handler.select_webpage_by_url(url)
        return result

# Fetch the mongo connection settings (linkbase + webpage) from mysql.
def get_mongo_conf(conf):
    """Read the 'linkbase' and 'webpage' rows of the `settings` table and
    return a flat dict of mongo connection parameters.

    On any error the traceback is logged and an empty (or partial) dict is
    returned — callers must check the result is truthy before using it.
    """
    mongo_conf = dict()
    try:
        db = MySQLdb.connect(host=conf.get('mysql_host'),
                             user=conf.get('mysql_user'),
                             passwd=conf.get('mysql_passwd'),
                             db=conf.get('mysql_db'),
                             port=conf.get('mysql_port', 3306))
        try:
            cursor = db.cursor()
            cursor.execute("SELECT value FROM settings WHERE item = '{0}'".format('linkbase'))
            linkbase = json.loads(cursor.fetchone()[0])
            mongo_conf['mongo_host_linkattr'] = linkbase['host']
            mongo_conf['mongo_port_linkattr'] = int(linkbase['port'])
            mongo_conf['mongo_user_linkattr'] = linkbase['user']
            mongo_conf['mongo_passwd_linkattr'] = linkbase['password']
            mongo_conf['mongo_db_linkattr'] = linkbase['database']

            cursor2 = db.cursor()
            cursor2.execute("SELECT value FROM settings WHERE item = '{0}'".format('webpage'))
            webpage = json.loads(cursor2.fetchone()[0])
            mongo_conf['mongo_host_web'] = webpage['host']
            mongo_conf['mongo_port_web'] = int(webpage['port'])
            mongo_conf['mongo_user_web'] = webpage['user']
            mongo_conf['mongo_passwd_web'] = webpage['password']
            mongo_conf['mongo_db_web'] = webpage['database']
        finally:
            # The original closed the connection only on the success path,
            # leaking it whenever a query or json parse failed; always close.
            db.close()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        conf.get('log').error(traceback.format_exc())
    return mongo_conf


def main(conf):
    signal.signal(signal.SIGTERM, signal.SIG_DFL)
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    signal.signal(signal.SIGUSR1, lambda a, b: profiling_signal_handler("crawler_merge", a, b))

    mongo_conf = get_mongo_conf(conf)
    if mongo_conf:
        convert = ConvertProccessor(conf.get('log'), mongo_conf)
        select_handler = SelectProcessor(conf.get('log'), mongo_conf, conf['beanstalk_conf'])
        merge_server = CrawlerMergeServer(conf, convert)
        merge_server.start()
        # 召回规则变动监听
        # dic_server = DicServer()
        # dic_server.start()
        try:
            handler = CrawlerMergeHandler(conf, convert, select_handler)
            processor = CrawlerMergeService.Processor(handler)
            transport = TSocket.TServerSocket(port=conf['server'].get('port'))
            tfactory = TTransport.TBufferedTransportFactory()
            pfactory = TBinaryProtocol.TBinaryProtocolFactory()
            server = TServer.TThreadPoolServer(processor, transport, tfactory, pfactory)
            server.setNumThreads(conf['server'].get('server_thread_num'))
            server.serve()
        except Exception, e:
            conf.get('log').error(str(e))
            merge_server.stop("fail")
            os._exit(1)
        merge_server.stop("success")


# 读取server.py的启动配置
def get_conf(file_path='./crawler_merge.toml'):
    try:
        opt, args = getopt.getopt(sys.argv[1:], 'f:', ['help'])
        for name, value in opt:
            if name == "-f":
                file_path = value
            elif name in ("-h", "--help"):
                print ''
                sys.exit()
            else:
                assert False, "unhandled option"
        with open(file_path, 'rb') as config:
            config = pytoml.load(config)
        log_name = config['server'].get('name') + str(config['server'].get('port'))
        config['log'] = LogHandler(log_name)
        return config
    except:
        print traceback.format_exc()
        sys.exit()


if __name__ == '__main__':
    main(get_conf())
