# -*- coding: utf-8 -*-
from scpy.logger import get_logger
import os
import sys
import json
from job51_by_company_id import Job51Crawler
from juxian_crawler_20167 import run_single_company
from lagou_job_crawler import LagouCrawler
from util.pgutil import PgUtil
from config.postgres_config import *
from scpy.xawesome_codechecker import get_ip
from util.recruitment_ana_util import job_info_trans
import requests
# Select local vs. Aliyun config by the current host's IP: machines on the
# 192.168.* LAN load the development config, everything else is assumed to
# be running in the Aliyun (production) environment.
if get_ip().startswith('192.168'):
    from config.config import *
else:
    from config.aliconfig import *
# Python 2 idiom: force the default string encoding to UTF-8 so implicit
# str/unicode conversions of non-ASCII company names don't raise
# UnicodeDecodeError. (Not available or needed on Python 3.)
reload(sys)
sys.setdefaultencoding('utf-8')
logger = get_logger(__file__)
# Directory containing this script, with a trailing slash when non-empty,
# so relative resource paths can be built as CURRENT_PATH + "filename".
CURRENT_PATH = os.path.dirname(__file__)
if CURRENT_PATH:
    CURRENT_PATH = CURRENT_PATH + "/"

# Shared PostgreSQL helper plus the raw connection (the connection is kept
# so failed queries can be rolled back explicitly).
pg = PgUtil()
conn = pg.get_conn()

# Names of all companies registered for recruitment monitoring.
GET_COMPANY_LIST = """
                   SELECT company_name FROM recruitment_monitor_company;
                   """


def get_monitor_list():
    """Return the rows of monitored companies from PostgreSQL.

    Each row is a dict-like object with a 'company_name' key.  On query
    failure the transaction is rolled back (so the shared connection stays
    usable) and an empty list is returned, letting callers iterate the
    result unconditionally.
    """
    try:
        result_list = pg.query_all_sql(GET_COMPANY_LIST)
        logger.info('getting monitor company list')
        return result_list
    except Exception:
        # Narrowed from a bare except (which would also swallow
        # KeyboardInterrupt/SystemExit); log at error level so the
        # failure is actually visible in the logs.
        logger.error('getting monitor company list failed')
        conn.rollback()
        return []


def main():
    """Crawl recruitment postings AND resumes for every monitored company.

    For each company: run the 51job and Lagou recruitment crawlers, then
    the resume crawler.  Companies are processed sequentially.
    """
    company_list = get_monitor_list()
    # enumerate(..., 1) replaces the manual `count` counter.
    for number, item in enumerate(company_list, 1):
        company_name = item.get('company_name')
        logger.info('crawler recruitment NO.%d company %s' % (number, company_name))
        Job51Crawler(company_name).run()
        LagouCrawler(company_name).run()
        logger.info('crawler resume NO.%d company %s' % (number, company_name))
        run_single_company(company_name)


def main_recruitment():
    """Crawl recruitment postings (51job + Lagou) for every monitored company."""
    company_list = get_monitor_list()
    # enumerate(..., 1) replaces the manual `count` counter.
    for number, item in enumerate(company_list, 1):
        company_name = item.get('company_name')
        # Log typo fixed: 'NOO.' -> 'NO.' to match the other entry points.
        logger.info('crawler recruitment NO.%d company %s' % (number, company_name))
        Job51Crawler(company_name).run()
        LagouCrawler(company_name).run()


def main_resume():
    """Crawl resumes for monitored companies via the paged monitor API.

    Pages through the company-monitor service 20 companies at a time and
    runs the resume crawler for each company name returned, stopping when
    the service marks the current page as the last one.

    NOTE(review): the endpoint host is hard-coded to an internal IP;
    presumably only reachable from the LAN/production network.
    """
    page_index = 1
    while True:
        params = {
            'index': page_index,
            'size': 20,
        }
        response = requests.get(
            'http://192.168.31.116:6060/api/crawler/monitor/companies/page',
            params=params)
        result = json.loads(response.content)
        for company_name in result['content']:
            run_single_company(company_name)
        # 'last' is True on the final page (was compared with `== False`;
        # plain truthiness is the idiomatic equivalent for JSON booleans).
        if result['last']:
            break
        page_index += 1

if __name__ == '__main__':
    # Dispatch on the optional command-line mode:
    #   (no argument) -> recruitment postings AND resumes
    #   'recruitment' -> recruitment postings only
    #   'resume'      -> resumes only
    if len(sys.argv) < 2:
        main()
    elif sys.argv[1] == 'recruitment':
        main_recruitment()
    elif sys.argv[1] == 'resume':
        main_resume()
    else:
        # `warning` replaces the deprecated `warn` alias; exit with a
        # non-zero status so shell scripts / cron can detect the misuse
        # (the original `sys.exit()` reported success).
        logger.warning('arg type failed')
        sys.exit(1)