# -*- coding: utf-8 -*-
from scpy.logger import get_logger
import os
import sys
import datetime
from job51_by_company_id import Job51Crawler
from juxian_crawler_20167 import run_single_company
from lagou_job_crawler import LagouCrawler
from pymongo import MongoClient
from scpy.xawesome_codechecker import get_ip
if get_ip().startswith('192.168'):
    from config.config import *
else:
    from config.aliconfig import *

# Python 2 hack: `site.py` deletes `sys.setdefaultencoding` at startup, so the
# module must be reloaded to get it back before forcing the process-wide
# default encoding to UTF-8 (needed for implicit str/unicode conversions).
reload(sys)
sys.setdefaultencoding('utf-8')

logger = get_logger(__file__)

# Directory containing this script; empty when run from its own directory,
# hence the conditional trailing slash.
CURRENT_PATH = os.path.dirname(__file__)
if CURRENT_PATH:
    CURRENT_PATH = CURRENT_PATH + "/"

# Shared MongoDB connection; host/port come from config or aliconfig,
# selected above by the local-vs-remote IP check.
MONGO = MongoClient(MONGODB_SERVER, MONGODB_PORT)


def insert_company_list(company_list):
    """Upsert each company name into the monitor company-list collection.

    For every name in *company_list* a document keyed by ``companyName`` is
    inserted, or refreshed if it already exists, with today's date stored in
    its ``modify`` field.

    :param company_list: iterable of company-name strings
    """
    collection = MONGO[MONGODB_DB_MONITOR][MONGODB_COLLECTION_MONITOR_COMPANYLIST]
    # Compute the stamp once instead of per iteration; it cannot meaningfully
    # change mid-loop at date granularity.
    today = str(datetime.datetime.now().date())
    for company in company_list:
        # update_one replaces the deprecated Collection.update (removed in
        # pymongo 4.x); upsert=True creates the document when it is missing.
        collection.update_one(
            {'companyName': company},
            {'$set': {'companyName': company, 'modify': today}},
            upsert=True,
        )


def search_monitor_company_list():
    """Fetch every monitored company name from MongoDB.

    Records lacking a truthy ``companyName`` field are skipped.

    :return: tuple ``(company_list, count)`` where ``count`` equals
        ``len(company_list)`` (kept for backward compatibility with callers
        that unpack two values).
    """
    cursor = MONGO[MONGODB_DB_MONITOR][MONGODB_COLLECTION_MONITOR_COMPANYLIST].find()
    # Comprehension replaces the manual append-and-count loop; the separate
    # counter was always identical to the list length.
    company_list = [record['companyName'] for record in cursor
                    if record.get('companyName')]
    return company_list, len(company_list)


def crawler_for_company(company):
    """Run all three crawlers (lagou, 51job, juxian) for one company.

    Each crawler is attempted independently: a failure (non-200 result or an
    exception) is logged and the remaining crawlers still run.

    :param company: company name string
    """
    # (info-kind, site-name, zero-arg runner) — deduplicates the three
    # previously copy-pasted try blocks.
    crawlers = [
        ('recruitment', 'lagou', lambda: LagouCrawler(company).run()),
        ('recruitment', '51job', lambda: Job51Crawler(company).run()),
        ('resume', 'juxian', lambda: run_single_company(company)),
    ]
    for kind, site, run in crawlers:
        try:
            logger.info('crawler %s info of %s in %s...' % (kind, company, site))
            if run() != 200:
                logger.info('search %s info in %s failed!' % (company, site))
        except Exception:
            # `except Exception` instead of the previous bare `except:` so
            # KeyboardInterrupt/SystemExit still abort the run; `exception`
            # records the traceback instead of silently discarding it.
            logger.exception('search %s info in %s failed!' % (company, site))

def main():
    """Crawl recruitment/resume info for every company in the monitor DB."""
    companies, total = search_monitor_company_list()
    logger.info('monitor database has %d companys' % total)
    number = 0
    for name in companies:
        number += 1
        # Visual separator between companies in the log output.
        logger.info('*' * 200)
        logger.info('searching infomation for NO.%d company %s...' % (number, name))
        crawler_for_company(name)


if __name__ == '__main__':
    # The commented block below is a one-off seeding step: uncommenting it
    # inserts the hard-coded company list into the monitor collection before
    # the crawl. NOTE(review): the list appears to be missing a comma between
    # two literals on the second line (implicit string concatenation would
    # merge them into one name) — verify before reusing.
    # companyList = [ u'杭州誉存科技有限公司', u'重庆澜鼎信息技术有限公司', u'上汽大众汽车有限公司', u'重庆金易房地产开发（集团）有限公司', u'无锡市中卫大药房有限公司',\
    #            u'江门市江磁电工企业有限公司'u'重庆万光实业集团有限公司', u'重庆钢运置业代理有限公司', u'重庆市金牛线缆有限公司', u'安投融（北京）网络科技有限公司']
    # insert_company_list(companyList)
    main()