# -*- coding: utf-8 -*-
from scpy.logger import get_logger
import os
import sys
from util.SCAWS import S3,SQS
import hashlib
import json
import datetime
from util.pgutil import PgUtil
import time

# Python 2 hack: re-expose setdefaultencoding and force UTF-8 so implicit
# str/unicode conversions elsewhere don't raise UnicodeDecodeError.
# NOTE(review): Python 2 only -- must be removed when porting to Python 3.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = get_logger(__file__)

# Directory containing this script, with a trailing slash when non-empty.
CURRENT_PATH = os.path.dirname(__file__)
if CURRENT_PATH:
    CURRENT_PATH = CURRENT_PATH + "/"

BUCKET_NAME = 'recruitment'       # S3 bucket holding the scraped postings
KEY_PATH = 'src/2016/11/7'        # S3 key prefix for the batch to process
QUEUE_NAME = 'recruitment_trans'  # SQS queue linking producer -> consumer
MAX_NUM = 1000                    # page size for S3 list_objects
# Module-level clients shared by all functions below (side effect at import).
s3 = S3()
sqs = SQS()
pgsql = PgUtil()

# Parameterized insert for one job posting: 23 fields taken from the JSON
# document in column order, plus update_time appended last by the caller.
INSERT_RECRUITMENT_INFO = """
                          INSERT INTO recruitment(
                                  url,
                                  company_name,
                                  category_type,
                                  category,
                                  salary,
                                  city,
                                  working_years,
                                  degree,
                                  job_title,
                                  description,
                                  source,
                                  company_url,
                                  entity,
                                  keywords_des,
                                  welfare,
                                  keywords,
                                  logo,
                                  scale,
                                  require_num,
                                  job_nature,
                                  industry,
                                  position,
                                  release_time,
                                  update_time
                          )  VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);
                          """


def donwload_file_update_pgsql(content):
    """Placeholder for downloading files and updating Postgres.

    NOTE(review): the body is an unimplemented stub -- it iterates over
    ``content`` without doing any work.  The misspelled name ("donwload")
    is kept for backward compatibility with existing callers.

    :param content: iterable of items to process (schema unknown -- TODO confirm)
    :return: None
    """
    try:
        for item in content:
            pass
    except Exception as e:
        # `except X as e` is valid on Python 2.6+ and Python 3, unlike the
        # old comma form; lazy %-args defer formatting until actually logged.
        logger.error('download file and update pgsql failed for %s', str(e))


def check_exist(url):
    """Return True when a recruitment row with this url already exists.

    :param url: posting source URL, the de-duplication key.
    :rtype: bool
    """
    # SELECT 1 ... LIMIT 1: an existence probe should not fetch whole rows.
    sql = 'select 1 from recruitment where url=%s limit 1'
    result = pgsql.select_sql(sql, (url,))
    return bool(result)

def recruitment_producer():
    """List every S3 object under KEY_PATH and forward its JSON body to SQS.

    Pages through the bucket MAX_NUM keys at a time.  Each object is
    downloaded to a scratch file ('data.json' in the working directory),
    parsed as JSON and re-serialized onto QUEUE_NAME for
    recruitment_consumer() to insert into Postgres.
    """
    sqs.create_queue(QUEUE_NAME)  # ensure the queue exists before sending
    client = s3._client
    start_marker = ''
    count = 1
    while True:
        response = client.list_objects(Bucket=BUCKET_NAME, Prefix=KEY_PATH,
                                       MaxKeys=MAX_NUM, Marker=start_marker)
        contents = response.get('Contents', [])
        for item in contents:
            key = item.get('Key')
            # download_fileobj needs a writable binary file object, so the
            # payload is round-tripped through a scratch file on disk.
            with open('data.json', 'wb') as data:
                client.download_fileobj(BUCKET_NAME, key, data)
            # Use `count` for both messages: the original logged count+1 on
            # download but count on send, numbering the same file twice.
            logger.info('download NO.%d file from aws s3', count)
            with open('data.json', 'rb') as f:
                info = json.loads(f.read())
            sqs.send_message(json.dumps(info), queue_name=QUEUE_NAME)
            logger.info('send No.%d file in s3 [recruitment]', count)
            count += 1
        if response.get('IsTruncated', False):
            # Resume after the last key actually returned; indexing with
            # MAX_NUM-1 raises IndexError when a truncated page is shorter.
            start_marker = contents[-1].get('Key')
        else:
            break


def recruitment_consumer():
    """Poll QUEUE_NAME forever, inserting each job posting into Postgres.

    Each message body is the JSON document sent by recruitment_producer();
    its fields are mapped positionally onto INSERT_RECRUITMENT_INFO with
    update_time appended last.  URLs already present in the table are
    skipped.  Sleeps 6s whenever the queue is empty.

    NOTE(review): nothing here visibly deletes the SQS message after a
    successful insert -- unless util.SCAWS.SQS.receive_message deletes it,
    processed messages will reappear after the visibility timeout; confirm.
    """
    # JSON field names in the column order of INSERT_RECRUITMENT_INFO,
    # hoisted out of the loop so the list is built once.
    message_keys = [
        'sourceUrl',
        'companyName',
        'categoryType',
        'category',
        'salary',
        'city',
        'workingYears',
        'degree',
        'jobTitle',
        'description',
        'source',
        'companyUrl',
        'entity',
        'keywordsDes',
        'welfare',
        'keywords',
        'logo',
        'scale',
        'requireNum',
        'jobNature',
        'industry',
        'position',
        'releaseTime'
    ]
    while True:
        result = sqs.receive_message(queue_name=QUEUE_NAME)
        if result:  # truthiness instead of len(result)
            info = json.loads(result[0].get('Body'))
            url = info['sourceUrl']
            values = [info[key] for key in message_keys]
            values.append(datetime.datetime.now())  # update_time column
            if not check_exist(url):
                pgsql.execute_insert_sql(INSERT_RECRUITMENT_INFO, values)
                logger.info('insert s3 file')
            else:
                logger.info('s3 file already exist')
        else:
            logger.info('message is empty sleeping 6s')
            time.sleep(6)

def main():
    """Walk every S3 object under KEY_PATH and insert it into Postgres.

    Offline variant of producer+consumer: each JSON file is downloaded
    (via a 'data.json' scratch file) and inserted directly, skipping the
    SQS hand-off.  URLs already present in the table are skipped.

    :return: always an empty list -- nothing is ever appended to it; kept
             for backward compatibility with callers expecting a list.
    """
    result = list()
    # JSON field names in the column order of INSERT_RECRUITMENT_INFO,
    # hoisted out of the per-object loop so the list is built once.
    message_keys = [
        'sourceUrl',
        'companyName',
        'categoryType',
        'category',
        'salary',
        'city',
        'workingYears',
        'degree',
        'jobTitle',
        'description',
        'source',
        'companyUrl',
        'entity',
        'keywordsDes',
        'welfare',
        'keywords',
        'logo',
        'scale',
        'requireNum',
        'jobNature',
        'industry',
        'position',
        'releaseTime'
    ]
    client = s3._client
    start_marker = ''
    count = 0
    while True:
        response = client.list_objects(Bucket=BUCKET_NAME, Prefix=KEY_PATH,
                                       MaxKeys=MAX_NUM, Marker=start_marker)
        contents = response.get('Contents', [])
        for item in contents:
            key = item.get('Key')
            # download_fileobj needs a writable binary file object, so the
            # payload is round-tripped through a scratch file on disk.
            with open('data.json', 'wb') as data:
                client.download_fileobj(BUCKET_NAME, key, data)
            logger.info('download NO.%d file from aws s3', count + 1)
            with open('data.json', 'rb') as f:
                info = json.loads(f.read())
            url = info['sourceUrl']
            values = [info[field] for field in message_keys]
            values.append(datetime.datetime.now())  # update_time column
            if not check_exist(url):
                pgsql.execute_insert_sql(INSERT_RECRUITMENT_INFO, values)
                logger.info('insert No.%d s3 file', count + 1)
            else:
                logger.info('No.%d s3 file already exist', count + 1)
            count += 1
        if response.get('IsTruncated', False):
            # Resume after the last key actually returned; indexing with
            # MAX_NUM-1 raises IndexError when a truncated page is shorter.
            start_marker = contents[-1].get('Key')
        else:
            break

    return result

if __name__ == '__main__':
    # Runs the SQS consumer loop; swap the comments to (re)fill the queue
    # from S3 with the producer instead.
    recruitment_consumer()
    # recruitment_producer()