# -*- coding: utf-8 -*-
from scpy.logger import get_logger
import os
import sys
from util.SCAWS import S3,SQS
import hashlib
import json
import datetime
from util.pgutil import PgUtil
import time

# Python 2 legacy: force UTF-8 as the process-wide default string encoding.
reload(sys)
sys.setdefaultencoding('utf-8')

logger = get_logger(__file__)

# Directory containing this script, with a trailing slash when non-empty.
CURRENT_PATH = os.path.dirname(__file__)
if CURRENT_PATH:
    CURRENT_PATH = CURRENT_PATH + "/"

BUCKET_NAME = 'recruitment'
KEY_PATH_ORIGIN = 'src/'
# Snapshot "now" once so year/month/day come from the same instant; calling
# datetime.now() three separate times could straddle a midnight rollover and
# produce a path mixing two different days.
_now = datetime.datetime.now()
KEY_PATH = KEY_PATH_ORIGIN + str(_now.year) + '/' + str(_now.month) + '/' + str(_now.day) + '/'
QUEUE_NAME = 'recruitment_trans'
MAX_NUM = 1000
s3 = S3()
sqs = SQS()


def main():
    """Forward every S3 object under today's KEY_PATH to the SQS transfer queue.

    Lists the bucket page by page (MAX_NUM keys per request), reads each
    object's JSON payload, and sends it to QUEUE_NAME.  A failure on one
    object is logged and skipped so a single bad key does not abort the run.
    """
    sqs.create_queue(QUEUE_NAME)
    client = s3._client
    start_marker = ''
    count = 1
    while True:
        response = client.list_objects(Bucket=BUCKET_NAME, Prefix=KEY_PATH,
                                       MaxKeys=MAX_NUM, Marker=start_marker)
        contents = response.get('Contents', [])
        for item in contents:
            key = item.get('Key')
            try:
                # Read the object body directly instead of round-tripping
                # through a local 'data.json' temp file: avoids disk I/O and
                # clobbering between concurrent runs of this script.
                body = client.get_object(Bucket=BUCKET_NAME, Key=key)['Body'].read()
                # Fix: original logged count+1 here but count below for the
                # same object; both messages now report the same number.
                logger.info('download NO.%d file from aws s3' % count)
                info = json.loads(body)
                sqs.send_message(json.dumps(info), queue_name=QUEUE_NAME)
                logger.info('send No.%d file in s3 [recruitment]' % count)
            except Exception as e:
                logger.error('produce recruitment info failed for %s' % str(e))
            count += 1
        if response.get('IsTruncated', False) and contents:
            # Resume after the last key actually returned.  Indexing with
            # [MAX_NUM-1] raised IndexError whenever a truncated page held
            # fewer than MAX_NUM entries.
            start_marker = contents[-1].get('Key')
        else:
            break


if __name__ == '__main__':
    main()