#!/usr/bin/env python 
# coding:utf-8
# @Time :10/11/18 11:47

from multiprocessing import Process

import sys

sys.path.append('../')
sys.path.append('../../')

from common.pybeanstalk import PyBeanstalk
from common.mongo import MongDb

from config.mongo_conf import MONGO_DB_SOURCE


from scheduler.company_name_loader import CompanyNameLoader
from worker.crawl_worker import CrawlWorker

from common.logger import AppLogger


# Shared application logger; every component below writes to wenshu_spider.log.
global_log = AppLogger("wenshu_spider.log").get_logger()

# NOTE(review): beanstalk/mongo wiring is disabled — the queue-fed
# CompanyNameLoader pipeline is not currently in use. Re-enable these
# (and the LOCAL_BEANSTALK_CONF import) to restore it.
# beanstalk = PyBeanstalk(LOCAL_BEANSTALK_CONF['host'], LOCAL_BEANSTALK_CONF['port'])

# mongo = MongDb(MONGO_DB_SOURCE['host'], MONGO_DB_SOURCE['port'], MONGO_DB_SOURCE['db'],
#                MONGO_DB_SOURCE['username'], MONGO_DB_SOURCE['password'], global_log)

# The only active component: the crawler itself, sharing the global logger.
crawl_worker = CrawlWorker(global_log)
# company_name_loader = CompanyNameLoader(beanstalk, mongo, global_log)

if __name__ == '__main__':
    # Run the crawl worker in a child process so it can be started and
    # joined independently of the parent interpreter.
    crawl_job = Process(target=crawl_worker.start)
    # company_name_loader_job = Process(target=company_name_loader.start)

    crawl_job.start()
    # company_name_loader_job.start()

    # Block until the worker process exits.
    crawl_job.join()
    # company_name_loader_job.join()

    # Fix: the original Python 2 `print "..."` statement is a SyntaxError
    # under Python 3; the call form below behaves identically on both.
    print("all job done!!!")
