# -*- coding: utf-8 -*-
from scpy.logger import get_logger
import os
import sys
from pymongo import MongoClient
from util.trans_recruitment_format import job_info_trans
from util.SCAWS import S3
from util.pgutil import PgUtil
import hashlib
import json
import datetime

# Python 2 only: re-expose setdefaultencoding so implicit str<->unicode
# conversions use UTF-8 instead of ASCII (job postings contain non-ASCII text).
# NOTE(review): this hack does not exist in Python 3 — blocker for porting.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = get_logger(__file__)

# Directory of this script, given a trailing slash when non-empty so it can
# be used directly as a path prefix.
CURRENT_PATH = os.path.dirname(__file__)
if CURRENT_PATH:
    CURRENT_PATH = CURRENT_PATH + "/"

# MongoDB source: crawler output collection holding raw recruitment records.
# NOTE(review): host/port are hard-coded — consider moving to configuration.
MONGODB_SERVER = "192.168.31.114"
MONGODB_PORT = 27017
MONGODB_DB_RECRUITMENT = "crawler_company_all"
MONGODB_COLLECTION_RECRUITMENT = 'recruitment'
DB_MONGO = MongoClient(MONGODB_SERVER,MONGODB_PORT)
coll = DB_MONGO[MONGODB_DB_RECRUITMENT][MONGODB_COLLECTION_RECRUITMENT]
# S3 destination used by main(); bucket name doubles as the dataset name.
S3_AWS = S3()
BUCKET_NAME = 'recruitment'
# PostgreSQL destination used by main_trans_mongo_to_pgsql()/find_info_back().
pgsql = PgUtil()
# Parameterised insert into the recruitment table. Column order here must
# match the key order built in main_trans_mongo_to_pgsql(); the final
# update_time value is appended by the caller at insert time.
INSERT_RECRUITMENT_INFO = """
                          INSERT INTO recruitment(
                                  url,
                                  company_name,
                                  category_type,
                                  category,
                                  salary,
                                  city,
                                  working_years,
                                  degree,
                                  job_title,
                                  description,
                                  source,
                                  company_url,
                                  entity,
                                  keywords_des,
                                  welfare,
                                  keywords,
                                  logo,
                                  scale,
                                  require_num,
                                  job_nature,
                                  industry,
                                  position,
                                  release_time,
                                  update_time
                          )  VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);
                          """


def main():
    """Export every MongoDB recruitment record to S3 as a JSON object.

    Each record is normalised via job_info_trans(), its volatile
    'updateTime' field dropped, and the payload stored under
    src/<year>/<month>/<day>/<sha256-of-content>.json so that identical
    payloads map to the same key. Success/failure is logged per record.
    """
    for count, record in enumerate(coll.find().batch_size(1), 1):
        job_info = job_info_trans(record)
        # Drop the volatile timestamp so reruns hash to the same name;
        # tolerate records that lack the field instead of aborting the run.
        job_info.pop('updateTime', None)
        # sort_keys makes the serialisation — and hence the sha256 file
        # name — deterministic regardless of dict key ordering. The default
        # ensure_ascii=True guarantees a pure-ASCII string, so the UTF-8
        # encode is safe on both Python 2 and 3.
        file_name = hashlib.sha256(
            json.dumps(job_info, sort_keys=True).encode('utf-8')).hexdigest()
        now = datetime.datetime.now()
        origin_key = 'src/%d/%d/%d/%s.json' % (
            now.year, now.month, now.day, file_name)
        body = json.dumps(job_info, ensure_ascii=False, indent=1)
        if S3_AWS.upload_file(BUCKET_NAME, origin_key, body):
            logger.info('trans No.%d mongo recruitment info success', count)
        else:
            # logger.warn is a deprecated alias; warning() is canonical.
            logger.warning('trans No.%d mongo recruitment info failed', count)


def check_exist(url):
    """Return True when a recruitment row with this url is already stored."""
    query = 'select * from recruitment where url=%s'
    rows = pgsql.select_sql(query, (url,))
    return bool(rows)


def main_trans_mongo_to_pgsql():
    """Copy every MongoDB recruitment record into the PostgreSQL
    ``recruitment`` table, skipping urls that already exist there.

    Per-record failures are logged and do not stop the run.
    """
    # Keys of the transformed dict, in the exact column order expected by
    # INSERT_RECRUITMENT_INFO (update_time is appended separately below).
    # Hoisted out of the loop — it is loop-invariant.
    keys = (
        'sourceUrl',
        'companyName',
        'categoryType',
        'category',
        'salary',
        'city',
        'workingYears',
        'degree',
        'jobTitle',
        'description',
        'source',
        'companyUrl',
        'entity',
        'keywordsDes',
        'welfare',
        'keywords',
        'logo',
        'scale',
        'requireNum',
        'jobNature',
        'industry',
        'position',
        'releaseTime',
    )
    for count, record in enumerate(coll.find().batch_size(1), 1):
        info = job_info_trans(record)
        # Volatile field, not stored in this table; default avoids a
        # KeyError outside the try block aborting the whole migration.
        info.pop('updateTime', None)
        try:
            url = info['sourceUrl']
            values = [info[key] for key in keys]
            values.append(datetime.datetime.now())  # update_time column
            if check_exist(url):
                logger.info('insert No.%d mongo info already exist', count)
            else:
                pgsql.execute_insert_sql(INSERT_RECRUITMENT_INFO, values)
                logger.info('insert No.%d mongo info into pgsql', count)
        except Exception as e:
            # Best-effort migration: log the bad record and keep going.
            logger.error('insert recruitment info pgsql failed for %s', str(e))

def find_info_back(skip_num, limit_num):
    """Backfill the salary_text / working_years_text / address columns of
    the PostgreSQL ``recruitment`` table from the raw MongoDB records.

    skip_num  -- number of Mongo records to skip (slice offset)
    limit_num -- maximum number of records to process in this slice
    """
    # Loop-invariant statement, hoisted out of the per-record loop.
    query = """
            update recruitment set salary_text=%s, working_years_text=%s,address=%s where
            url=%s;
            """
    cursor = coll.find().skip(skip_num).limit(limit_num).batch_size(1)
    for count, record in enumerate(cursor, skip_num + 1):
        values = (
            record.get('salary', ''),
            record.get('workingYears', ''),
            record.get('address', ''),
            # The Mongo _id holds the source url, which is the PG row key.
            record.get('_id', ''),
        )
        try:
            pgsql.execute_update_sql(query, values)
            logger.info('update No.%d mongo info into pgsql', count)
        except Exception as e:
            # Best-effort backfill: log the failed record and continue.
            logger.error('update recruitment info pgsql failed for %s', str(e))

if __name__ == '__main__':
    # Usage: python <script> <skip_num> <limit_num>
    # Runs the PG backfill over a slice of the collection. Raises
    # IndexError/ValueError if the two integer arguments are missing
    # or malformed.
    find_info_back(int(sys.argv[1]), int(sys.argv[2]))
    # main()