# -*- coding: utf-8 -*-
from scpy.logger import get_logger
import os
import sys
sys.path.append('/home/scdev/cc_git/crawler-recruitment/util')
import datetime
import json
import requests
from pymongo import MongoClient
from job51_by_company_id import Job51Crawler
from juxian_crawler_20167 import run_single_company
from lagou_job_crawler import LagouCrawler
from util.pgutil import PgUtil,Pgutil_Local
from config.postgres_config import *
from scpy.xawesome_codechecker import get_ip
from util.recruitment_ana_util import job_info_trans

# Select the configuration by environment: LAN addresses load the local
# config, anything else is assumed to run on the Aliyun host.
if get_ip().startswith('192.168'):
    from config.config import *
else:
    from config.aliconfig import *

# Python 2 only: site.py deletes sys.setdefaultencoding at startup and it only
# reappears after reload(sys). With the reload commented out, the unguarded
# call raised AttributeError at import time; guard it so the module imports on
# interpreters where the attribute is unavailable (including Python 3).
if hasattr(sys, 'setdefaultencoding'):
    sys.setdefaultencoding('utf-8')

logger = get_logger(__file__)

# Directory containing this file, with a trailing slash when non-empty
# (empty when the module is run from its own directory).
CURRENT_PATH = os.path.dirname(__file__)
if CURRENT_PATH:
    CURRENT_PATH = CURRENT_PATH + "/"


# --- SQL statements used by the functions below --------------------------

# Insert one job posting into the main `recruitment` table (21 columns;
# the value order must match the key order built in the save functions).
INSERT_RECRUITMENT_INFO = """
                          INSERT INTO recruitment(
                          company_name,
                          city,
                          entity,
                          title,
                          industry,
                          scale,
                          short_name,
                          working_years,
                          salary,
                          degree,
                          category,
                          keywords,
                          company_id,
                          nature,
                          release_time,
                          logo_url,
                          welfare,
                          url,
                          description,
                          info_source,
                          update_time
                          )  VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);
                          """
# Insert one normalized row into the analysis table (13 columns).
INSERT_RECRUITMENT_ANA_INFO = """
                          INSERT INTO recruitment_analysis(
                          company_name,
                          url,
                          salary,
                          city,
                          category,
                          category_type,
                          degree,
                          working_years,
                          keywords,
                          keywords_des,
                          release_time,
                          update_time,
                          industry
                          )  VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);
                          """

# Touch an existing analysis row (url is the unique key).
UPDATE_RECRUITMENT_ANA_INFO = """
                              UPDATE recruitment_analysis SET
                              update_time=%s WHERE url=%s;
                              """


# Touch an existing recruitment row.
UPDATE_RECRUITMENT_UPDATETIME = """
                          UPDATE recruitment SET update_time=%s WHERE url=%s;
                          """

# Existence probes keyed on the job url.
CHECK_RECRUITMENT_EXIST = """
                          SELECT company_name FROM recruitment WHERE url=%s;
                          """

CHECK_RECRUITMENT_ANA_EXIST = """
                              SELECT company_name FROM recruitment_analysis WHERE url=%s;
                              """

# Monitored-company table: insert / update-industry / existence / full list.
INSERT_COMPANY_INFO = """
                      INSERT INTO recruitment_monitor_company(
                      company_name,
                      create_time,
                      industry
                      ) VALUES(%s,%s,%s);
                      """
UPDATE_COMPANY_INFO = """
                    UPDATE recruitment_monitor_company SET
                    industry=%s
                    WHERE company_name=%s;
                      """

CHECK_COMPANY_EXIST = """
                      SELECT company_name FROM recruitment_monitor_company WHERE company_name=%s;
                      """

GET_COMPANY_LIST = """
                   SELECT company_name FROM recruitment_monitor_company;
                   """

# Full-table scan used by the batch normalization / transfer jobs.
SEARCH_RECRUITMENT_INFO = """
                          SELECT * FROM recruitment;
                          """

# Write back normalized category/category_type/salary, keyed on url.
UPDATE_RECRUITMENT_FORMAT = """
                            UPDATE recruitment SET
                            category=%s,
                            category_type=%s,
                            salary=%s
                            WHERE url=%s;
                            """

# Shared module-level resources reused by every function below: one Postgres
# helper plus its raw connection (needed for rollback()), and one Mongo client.
# MONGODB_SERVER/MONGODB_PORT presumably come from the config star-import
# above — TODO confirm against config.config / config.aliconfig.
pg = PgUtil()
conn = pg.get_conn()
DB = MongoClient(MONGODB_SERVER,MONGODB_PORT)

# -------------------------------
# re write LagouCrawler class FUN[saving] add class FUN[check_recruitment_info_exist]

class lagou_job_crawler_postgres(LagouCrawler):
    """LagouCrawler variant that persists job postings to Postgres.

    Overrides save() to insert into the `recruitment` table, using the job
    url as the dedup key; existing rows only get their update_time bumped.
    """

    def check_recruitment_info_exist(self, url):
        """Return True when a recruitment row with this url already exists.

        On a database error the transaction is rolled back and False is
        returned (best effort: the caller will then attempt the insert).
        """
        try:
            res = pg.select_sql(CHECK_RECRUITMENT_EXIST, (url,))
            if res:
                logger.info('recruitment info already exist')
                return True
            logger.info('recruitment info do not exist')
            return False
        except Exception as e:
            # was a bare except: it swallowed KeyboardInterrupt/SystemExit
            # and discarded the error detail
            logger.info('checking recruitment info failed for %s' % str(e))
            conn.rollback()
            return False

    def save(self, dataItem):
        """Insert dataItem into `recruitment`, or bump update_time if present.

        :param dataItem: crawler job dict; must contain every key in _keys.
        """
        _url = dataItem['sourceUrl']
        if not self.check_recruitment_info_exist(_url):
            # Order must match the column list of INSERT_RECRUITMENT_INFO.
            _keys = ['companyName',
                     'sourceCity',
                     'entity',
                     'jobTitle',
                     'industry',
                     'scale',
                     'companyShortName',
                     'workingYears',
                     'salary',
                     'degree',
                     'category',
                     'keywords',
                     'companyId',
                     'jobNature',
                     'releaseTime',
                     'logo',
                     'welfare',
                     'sourceUrl',
                     'description'
                     ]
            values = [dataItem[key] for key in _keys]
            # info_source and update_time are supplied here, not by the crawler
            values.append('lagou')
            values.append(datetime.datetime.now())
            try:
                pg.execute_insert_sql(INSERT_RECRUITMENT_INFO, values)
                logger.info('insert new recruitment info')
            except Exception as e:
                logger.info('lagou recruitment info insert failed for %s' % str(e))
                conn.rollback()
        else:
            try:
                pg.execute_insert_sql(UPDATE_RECRUITMENT_UPDATETIME, (datetime.datetime.now(), _url))
                logger.info('update recruitment info')
            except Exception as e:
                logger.info('update recruitment info failed for %s' % str(e))
                conn.rollback()

# ------------------LAGOU CRAWLER END------------------------


# -----------------------------------------------------------
# re write Job51Crawler class FUN[saving] add class FUN[check_recruitment_info_exist]
class job51_crawler_postgres(Job51Crawler):
    """Job51Crawler variant that persists job postings to Postgres.

    Mirrors lagou_job_crawler_postgres: save() inserts a new `recruitment`
    row keyed on the job url, or only refreshes update_time on duplicates.
    """

    def save(self, dataItem):
        """Insert dataItem into `recruitment`, or bump update_time if present.

        :param dataItem: crawler job dict; must contain every key in _keys.
        """
        _url = dataItem['sourceUrl']
        if not self.check_recruitment_info_exist(_url):
            # Order must match the column list of INSERT_RECRUITMENT_INFO.
            _keys = ['companyName',
                     'sourceCity',
                     'entity',
                     'jobTitle',
                     'industry',
                     'scale',
                     'companyShortName',
                     'workingYears',
                     'salary',
                     'degree',
                     'category',
                     'keywords',
                     'companyId',
                     'jobNature',
                     'releaseTime',
                     'logo',
                     'welfare',
                     'sourceUrl',
                     'description'
                     ]
            values = [dataItem[key] for key in _keys]
            # info_source and update_time are supplied here, not by the crawler
            values.append('51job')
            values.append(datetime.datetime.now())
            try:
                pg.execute_insert_sql(INSERT_RECRUITMENT_INFO, values)
                logger.info('insert new recruitment info')
            except Exception as e:
                # fixed copy-paste: this message previously said 'lagou'
                logger.info('51job recruitment info insert failed for %s' % str(e))
                conn.rollback()
        else:
            try:
                pg.execute_insert_sql(UPDATE_RECRUITMENT_UPDATETIME, (datetime.datetime.now(), _url))
                logger.info('update recruitment info')
            except Exception as e:
                logger.info('update recruitment info failed for %s' % str(e))
                conn.rollback()

    def check_recruitment_info_exist(self, url):
        """Return True when a recruitment row with this url already exists.

        Rolls back and returns False on database errors (best effort).
        """
        try:
            res = pg.select_sql(CHECK_RECRUITMENT_EXIST, (url,))
            if res:
                logger.info('recruitment info already exist')
                return True
            logger.info('recruitment info do not exist')
            return False
        except Exception as e:
            logger.info('checking recruitment info failed for %s' % str(e))
            conn.rollback()
            return False
# -------------- 51job CRAWLER END----------------------------


# --------------INSERT COMPANY INFO BEGIN----------------------
def company_industry_info(companyName):
    """Query the profile service for a company's basic information.

    :param companyName: full company name
    :return: dict with 'province','city','address','industryType','scale',
             'companyStatus', or {} when the request fails or returns nothing.
    """
    # removed: an unused requests.session() that was created and never used
    industryUrl = INDUSTRYURL.format(companyName)
    payload = {'feaType': 'custom',
               'source': ['province', 'city', 'address', 'industryType', 'scale', 'companyStatus'],
               'companyNameList': [companyName]}
    response = requests.post(url=industryUrl, data=json.dumps(payload))
    if response.status_code == 200:
        dataInfo = response.json().get('result')
        # truthiness check also guards against a missing/None 'result' key,
        # where the old len(dataInfo) would have raised TypeError
        if dataInfo:
            return dataInfo[0]
    return {}


def insert_company_info(companyName):
    """Insert or update one monitored-company row in Postgres.

    :param companyName: full company name
    :return: None; writes to recruitment_monitor_company.
    """
    companyInfo = company_industry_info(companyName)
    if len(companyInfo) == 0:
        # No profile available: store the bare name and STOP. The original
        # fell through here, the exists-check then matched the row it had
        # just created, and companyInfo['industryType'] raised KeyError on
        # the empty dict.
        logger.info('do not get company info for %s only insert companyName' % companyName)
        try:
            pg.execute_insert_sql(
                'insert into recruitment_monitor_company(company_name,create_time) VALUES (%s,%s)',
                (companyName, datetime.datetime.now()))
        except Exception as e:
            logger.info('insert company info failed for %s' % e)
            conn.rollback()
        return
    if not check_recruitment_monitor_company_exist(companyName):
        # Order matches INSERT_COMPANY_INFO: company_name, create_time, industry.
        values = [companyName, datetime.datetime.now(), companyInfo['industryType']]
        try:
            pg.execute_insert_sql(INSERT_COMPANY_INFO, values)
            logger.info('insert company %s info' % companyName)
        except Exception as e:
            logger.info('insert company info failed for %s' % e)
            conn.rollback()
    else:
        try:
            pg.execute_update_sql(UPDATE_COMPANY_INFO, (companyInfo['industryType'], companyName))
            logger.info('company info already exist, update info')
        except Exception as e:
            logger.error("update exist company info failed for %s" % str(e))
            # rollback was missing here, leaving the connection in an
            # aborted transaction for every later statement
            conn.rollback()


def check_recruitment_monitor_company_exist(companyName):
    """Return True when companyName already exists in recruitment_monitor_company.

    :param companyName: full company name
    :return: bool; False (after rollback) on database errors.
    """
    try:
        res = pg.select_sql(CHECK_COMPANY_EXIST, (companyName,))
        if res:
            logger.info('company %s info already exist' % companyName)
            return True
        logger.info('company %s info do not exist' % companyName)
        return False
    except Exception as e:
        # was a bare except: narrowed and the error detail is now logged
        logger.info('checking company info failed for %s' % str(e))
        conn.rollback()
        return False


def insert_monitor_company_list():
    """Page through the monitor-company HTTP API and upsert every company.

    Fetches 20 companies per page until the API reports the last page.
    """
    index = 1
    while True:
        params = {
            'index': index,
            'size': 20,
        }
        try:
            response = requests.get('http://192.168.31.116:6060/api/crawler/monitor/companies/page', params=params)
        except Exception as e:
            # NOTE(review): retries forever with no backoff when the host is
            # unreachable — consider a retry limit / sleep.
            logger.info('request failed for %s, retry' % str(e))
            continue
        result = json.loads(response.content)
        for company_name in result['content']:
            insert_company_info(company_name)
        if result['last']:
            # 'last' flags the final page of the paged API
            break
        index += 1


def get_monitor_list():
    """Return every row of recruitment_monitor_company, or [] on failure.

    :return: list of rows (each exposing .get('company_name')).
    """
    try:
        resultList = pg.query_all_sql(GET_COMPANY_LIST)
        logger.info('geting monitor company list')
        return resultList
    except Exception as e:
        # was a bare except: narrowed and the error detail is now logged
        logger.info('getting monitor company list failed for %s' % str(e))
        conn.rollback()
        return []
# --------------INSRT COMPANY INFO END-----------------------


# -------------- Trans Mongo INFO --------------------------
def check_recruitment_info_exist(url):
    """Return True when a recruitment row with this url already exists.

    Module-level twin of the crawler-class methods, used by the Mongo
    transfer path. Rolls back and returns False on database errors.
    """
    try:
        res = pg.select_sql(CHECK_RECRUITMENT_EXIST, (url,))
        if res:
            logger.info('recruitment info already exist')
            return True
        logger.info('recruitment info do not exist')
        return False
    except Exception as e:
        # was a bare except: narrowed and the error detail is now logged
        logger.info('checking recruitment info failed for %s' % str(e))
        conn.rollback()
        return False

def saving_recruiment_info(dataItem):
    """Insert one Mongo recruitment record into Postgres (or touch it).

    :param dataItem: Mongo document; must carry every key in _keys
                     (including 'source', which maps to info_source).
    """
    _url = dataItem['sourceUrl']
    if not check_recruitment_info_exist(_url):
        # Order must match the column list of INSERT_RECRUITMENT_INFO;
        # the trailing update_time is appended below.
        _keys = ['companyName',
                 'sourceCity',
                 'entity',
                 'jobTitle',
                 'industry',
                 'scale',
                 'companyShortName',
                 'workingYears',
                 'salary',
                 'degree',
                 'category',
                 'keywords',
                 'companyId',
                 'jobNature',
                 'releaseTime',
                 'logo',
                 'welfare',
                 'sourceUrl',
                 'description',
                 'source',
                 ]
        # keywords is stored as an array column; split the space-separated
        # string, falling back to [] for missing/non-string values
        try:
            dataItem['keywords'] = dataItem['keywords'].split(' ')
        except Exception:
            dataItem['keywords'] = []
        try:
            values = [dataItem[key] for key in _keys]
        except Exception as e:
            # A missing key means the record cannot be mapped to the table;
            # abort here instead of hitting an unbound `values` below (the
            # original only printed the error and then crashed on append).
            logger.info('building recruitment values failed for %s' % str(e))
            return
        values.append(datetime.datetime.now())  # update_time
        try:
            pg.execute_insert_sql(INSERT_RECRUITMENT_INFO, values)
            logger.info('insert new recruitment info')
        except Exception as e:
            logger.info('mongo recruitment info insert failed for %s' % str(e))
            conn.rollback()
    else:
        try:
            pg.execute_insert_sql(UPDATE_RECRUITMENT_UPDATETIME, (datetime.datetime.now(), _url))
            logger.info('update recruitment info')
        except Exception as e:
            logger.info('update recruitment info failed for %s' % str(e))
            conn.rollback()


def trans_recruitment_from_mongo(companyName):
    """Copy every Mongo recruitment record of companyName into Postgres.

    :param companyName: full company name used as the Mongo query key.
    """
    try:
        coll = DB[MONGODB_DB_RECRUITMENT][MONGODB_COLLECTION_RECRUITMENT]
        for record in coll.find({'companyName': companyName}):
            saving_recruiment_info(record)
    except Exception as e:
        # removed dead `count`/`recruitmentList` locals; the error detail
        # (previously swallowed by a bare except) is now logged
        logger.info('trans recruitment info of %s FUN failed for %s' % (companyName, str(e)))
# -------------Trans Mongo INFO END --------------------


# -------------update recruitment info -----------------

# -------- category/category_type/salary ---------------
def update_recruitment_format_info():
    """Normalize category/category_type/salary for every recruitment row.

    Scans the whole recruitment table, runs each row through
    job_info_trans(), and writes the normalized fields back keyed on url.
    """
    try:
        res = pg.query_all_sql(SEARCH_RECRUITMENT_INFO)
        # Order must match the placeholders of UPDATE_RECRUITMENT_FORMAT.
        _keys = [
            'category',
            'categoryType',
            'salary',
            'url'
        ]
        count = 0
        for item in res:
            item = job_info_trans(item)
            # salary column is text; coerce whatever job_info_trans produced
            try:
                item['salary'] = str(item['salary'])
            except Exception as e:
                logger.info('salary trans failed for %s' % str(e))
            values = [item[key] for key in _keys]
            try:
                pg.execute_update_sql(UPDATE_RECRUITMENT_FORMAT, values)
            except Exception as e:
                logger.info('update recruitment info on category failed for %s' % str(e))
                conn.rollback()
            count += 1
            logger.info('trans NO.%d recruitment info' % count)
    except Exception as e:
        # outer handler previously dropped the error detail
        logger.info('update recruitment info on category failed for %s' % str(e))
        conn.rollback()
# -------- category/category_type/salary end -------------

# -------- trans recruitment info to analysis table --------
def check_recruitment_ana_info_exist(url):
    """Return True when a recruitment_analysis row with this url exists.

    Rolls back and returns False on database errors (best effort).
    """
    try:
        res = pg.select_sql(CHECK_RECRUITMENT_ANA_EXIST, (url,))
        if res:
            logger.info('recruitment ana info already exist')
            return True
        logger.info('recruitment ana info do not exist')
        return False
    except Exception as e:
        # was a bare except: narrowed and the error detail is now logged
        logger.info('checking recruitment info failed for %s' % str(e))
        conn.rollback()
        return False


def trans_recruitment_info_to_ana():
    """Copy every recruitment row into recruitment_analysis (insert or touch).

    Target schema:
      company_name         TEXT NOT NULL,
      url                  TEXT NOT NULL UNIQUE,
      salary               FLOAT,
      city                 TEXT,
      category             TEXT,
      category_type        TEXT,
      degree               TEXT,
      working_years        TEXT,
      keywords             TEXT [],
      keywords_des         TEXT [],
      release_time         TIMESTAMP,
      update_time          TIMESTAMP,
      industry             TEXT []
    :return: None
    """
    try:
        resList = pg.query_all_sql(SEARCH_RECRUITMENT_INFO)
        for res in resList:
            res = job_info_trans(res)
            _url = res.get('url')
            # Order must match INSERT_RECRUITMENT_ANA_INFO; industry is
            # appended separately below.
            _keys = [
                'company_name',
                'url',
                'salary',
                'city',
                'category',
                'category_type',
                'degree',
                'working_years',
                'keywords',
                'keywords_des',
                'release_time',
                'update_time',
            ]
            values = [res[key] for key in _keys]
            # industry comes from the monitor-company table, not the job row
            query_res = pg.select_sql('SELECT industry FROM recruitment_monitor_company WHERE company_name=%s',
                                      (res.get('company_name', ''),))
            if query_res:
                industry = query_res[0]
            else:
                industry = []
            values.append(industry)
            if check_recruitment_ana_info_exist(_url):
                logger.info('recruitment analysis info already exist,update info')
                pg.execute_update_sql(UPDATE_RECRUITMENT_ANA_INFO,
                                      (res.get('update_time', datetime.datetime.now().strftime('%Y-%m-%d')),
                                       res.get('url', '')))
            else:
                logger.info('recruitment analysis info do not exist,insert info')
                pg.execute_insert_sql(INSERT_RECRUITMENT_ANA_INFO, values)
    except Exception as e:
        logger.info('trans recruitment info to analysis table failed for %s' % str(e))
        conn.rollback()

# -------------similar company operation -----------------
def search_similar_company(companyInfo, count=100):
    """Query the huoke profile API for companies similar to companyInfo.

    :param companyInfo: {companyName:'', industryType:[], scale:'', province:''}
    :param count: upper bound on the number of companies returned
    :return: list of dicts with company/industryType/province/scale,
             excluding the query company itself; [] on HTTP failure.
    """
    industry = companyInfo.get('industryType', '')
    scale = companyInfo.get('scale', '')
    province = companyInfo.get('province', '')
    company_name = companyInfo.get('companyName', '')
    comp_list = []
    query = {'userID': u'sst', 'userType': u'SC_BSC', 'count': count, 'esIndex': 'profile',
             'industryType': ','.join(industry), 'scale': scale, 'province': province,
             'source': ["company", "industryType", "scale", "province"]}
    logger.info('industry types for the company %s' % ','.join(industry))
    data = requests.post(url=PROFILE_URL, data=json.dumps(query))
    if data.status_code != 200:
        return []
    results = data.json()
    # guard: an error payload may lack 'result' or 'data' — the original
    # chained .get(...).get(...) raised AttributeError in that case
    for r in (results.get("result") or {}).get('data') or []:
        if r.get("company") == company_name:
            # skip the query company itself
            continue
        comp_list.append({'company': r.get("company"), 'industryType': r.get("industryType"),
                          'province': r.get('province'), 'scale': r.get('scale')})
    return comp_list


def insert_similar_company():
    """Seed the monitor table with companies similar to a fixed sample list."""
    seed_companies = [
        u'杭州誉存科技有限公司', u'重庆澜鼎信息技术有限公司', u'上汽大众汽车有限公司',
        u'重庆金易房地产开发（集团）有限公司', u'无锡市中卫大药房有限公司',
        u'江门市江磁电工企业有限公司', u'重庆万光实业集团有限公司',
        u'重庆钢运置业代理有限公司', u'重庆市金牛线缆有限公司',
        u'安投融（北京）网络科技有限公司',
    ]
    for seed_name in seed_companies:
        # look up the seed's profile, then fan out to its similar companies
        profile = company_industry_info(seed_name)
        profile['companyName'] = seed_name
        for similar in search_similar_company(profile):
            insert_company_info(similar.get('company', ''))
# -------------similar company operation end---------------


# ------------------ main process -------------------------
def main():
    """Run the 51job and lagou crawlers for every monitored company."""
    companyList = get_monitor_list()
    for count, item in enumerate(companyList, start=1):
        companyName = item.get('company_name')
        logger.info('crawler NO.%d company %s' % (count, companyName))
        job51_crawler_postgres(companyName).run()
        lagou_job_crawler_postgres(companyName).run()


def main_trans_mongo_info():
    """Transfer the Mongo recruitment history of every monitored company."""
    companyList = get_monitor_list()
    for count, item in enumerate(companyList, start=1):
        companyName = item.get('company_name')
        logger.info('trans NO.%d company %s mongo info to postgres' % (count, companyName))
        trans_recruitment_from_mongo(companyName)
# ------------------ main process end ---------------------


# ------------ test part ---------------

def test():
    """Ad-hoc manual test entry point: run the analysis-table transfer."""
    # Previously exercised entry points (kept for reference):
    #   lagou_job_crawler_postgres(u'重庆澜鼎信息技术有限公司').run()
    #   update_recruitment_format_info()
    trans_recruitment_info_to_ana()
    
# ------------ test part end------------


if __name__ == '__main__':
    # Alternate entry points, kept for manual one-off runs:
    # trans_recruitment_from_mongo(u'杭州誉存科技有限公司')
    # companyList = [ u'杭州誉存科技有限公司', u'重庆澜鼎信息技术有限公司', u'上汽大众汽车有限公司', u'重庆金易房地产开发（集团）有限公司', u'无锡市中卫大药房有限公司',\
    #            u'江门市江磁电工企业有限公司', u'重庆万光实业集团有限公司', u'重庆钢运置业代理有限公司', u'重庆市金牛线缆有限公司', u'安投融（北京）网络科技有限公司']
    # insert_monitor_company_list(companyList)
    # main_trans_mongo_info()
    # insert_similar_company()
    # test()
    # main()
    # company_industry_info(u'江西世阳快运有限公司九江分公司')

    # Current default run: refresh the monitor-company table from the HTTP API.
    insert_monitor_company_list()


