# -*- coding: utf-8 -*-
import json
import logging

import math
import time
import requests
from traceback import format_exc
from datetime import datetime
from requests import TooManyRedirects
from scrapy.utils.project import get_project_settings
from zc_core.middlewares.proxies.cached_pool import CachedProxyPool
from zc_core.model.items import Order
from zc_core.util.batch_gen import time_to_batch_no
from zc_core.util.common import parse_time
from zc_core.util.encrypt_util import base64_decode
from esgcc.simple.simple_dao import SimpleDao
from esgcc.simple.simple_session import SimpleSession

# Log to a file and mirror everything to the console with the same format.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    filename='order_list_ios.log',
    filemode='a')
console = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(levelname)s: %(message)s')
console.setFormatter(formatter)
logging.getLogger().addHandler(console)


# Login session manager (cookie lifecycle lives here).
simple_session = SimpleSession()
# cookies = {'JSESSIONID': '5A4A65598E15B8EE363B0B3E0D0D9D1A', '__d_s_': '74DB46A51500E46F323D8DD2DA3ADE8A',
#            '__s_f_c_s_': '10AC30904622FD37BB2A5AE8319A49EB', '__t_c_k_': 'cd701743086843f686db3c44e5994509'}
# Cookies sent with every request; currently empty (see commented sample above).
cookies = {}

# Shared collaborators built from the scrapy project settings.
settings = get_project_settings()
proxy_pool = CachedProxyPool(settings)
simple_dao = SimpleDao()
# Per-page retry budget and the map tracking attempts per page number.
max_retry = 5
batch_retry_map = {}


def parse_total_page_ios(txt):
    """Extract the total page count from a raw order-list response.

    The response embeds a doubly base64-encoded JSON payload in 'exData';
    returns 0 when that payload carries no data section.
    """
    body = json.loads(txt)
    encoded = body.get('exData', '').replace('\n', '')
    payload = json.loads(base64_decode(base64_decode(encoded)))
    data = payload.get('resultDTO', {}).get('data', {})
    if not data:
        return 0
    total_num = data.get('totalNum', 0)
    page_size = data.get('page', {'pageSize': 10}).get('pageSize', 10)
    return math.ceil(total_num / page_size)


def parse_order_list_ios(txt):
    """Parse Order items out of a raw order-list response.

    Decodes the doubly base64-encoded 'exData' payload and builds one
    Order per entry. Raises a plain Exception when the payload has no
    data section so the caller's retry logic kicks in.
    """
    body = json.loads(txt)
    encoded = body.get('exData', '').replace('\n', '')
    payload = json.loads(base64_decode(base64_decode(encoded)))
    data = payload.get('resultDTO', {}).get('data', {})
    if not data:
        raise Exception('need to retry...')

    items = []
    for order in data.get('orders', []):
        order_id = order.get('orderId')
        order_time = parse_time(order.get('orderPayTimeStr'))
        item = Order()
        item['_id'] = order_id
        item['url'] = 'http://buser.esgcc.com.cn/order/recent_purchase_detail?orderId={}'.format(order_id)
        item['orderTime'] = order_time
        item['orderDept'] = order.get('transactionDesc')
        item['batchNo'] = time_to_batch_no(order_time)
        item['genTime'] = datetime.utcnow()
        items.append(item)
    return items


def send_req(page):
    """POST the order-list query for one page and return the response text.

    On TooManyRedirects (treated as a logged-out session) the cookies are
    cleared and the request is retried once recursively.

    :param page: 1-based page number to request.
    :return: response body text, or the retry's result on redirect failure.
    """
    order_list_url = 'https://m.esgcc.com.cn/common/sendPostByJsonStr'
    headers = {
        'Host': 'm.esgcc.com.cn',
        'Accept': 'application/json',
        'path': '/mmainapi/mmain/ShowIndexService/getLatestOrdersPage',
        'Accept-Language': 'zh-cn',
        'Accept-Encoding': 'br, gzip, deflate',
        'type': 'getUserData',
        'token': 'a3ae44617b124901a3983228e1a1251c',
        'User-Agent': 'LingxingRN/2.2.7 CFNetwork/976 Darwin/18.2.0',
        'Connection': 'keep-alive',
        'Content-Type': 'application/json',
    }
    data = json.dumps({
        "pgs": 10,
        "pgn": page,
        "orderId": "",
        "loginAccount": "",
        "transactionDesc": ""
    })
    try:
        # Fetch login cookies.
        # cookies = simple_session.get_cookies()
        rs = requests.post(
            url=order_list_url,
            cookies=cookies,
            headers=headers,
            data=data,
            timeout=180
        )
        return rs.text
    except TooManyRedirects as ex:
        # A redirect loop means the session expired: drop cookies and retry.
        logging.info('--> logout: %s' % time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()))
        simple_session.clean_cookies()
        # Retry
        logging.info('--> retry send_req: page=%s' % page)
        # BUG FIX: the retry's result must be returned, otherwise callers
        # received None even when the retried request succeeded.
        return send_req(page)


def crawl_page(page):
    """Fetch, parse and persist one page of orders, retrying on any error.

    Attempts are tracked per page in the module-level batch_retry_map;
    after max_retry extra attempts the page is abandoned.
    """
    key = str(page)
    while True:
        try:
            txt = send_req(page)
            orders = parse_order_list_ios(txt)
            logging.info('page: %s, cnt=%s' % (page, len(orders)))
            simple_dao.save_orders(orders)
            time.sleep(1)
            return
        except Exception:
            attempts = batch_retry_map.get(key, 0)
            if attempts > max_retry:
                logging.info('--> give up retry crawl_page: page=%s' % page)
                return
            batch_retry_map[key] = attempts + 1
            logging.info('--> retry crawl_page: page=%s' % page)
            time.sleep(5)


def crawl_batch():
    """Crawl every page of the current order list.

    :return: True when pages were crawled, False when the first response
             had no paging info, None when an unexpected exception occurred.
    """
    # BUG FIX: without the global declaration the assignment below created
    # a *local* batch_retry_map, so the per-page retry counters were never
    # reset between batches — a page that exhausted its retries once would
    # be given up on immediately in every later batch.
    global batch_retry_map
    try:
        batch_retry_map = {}
        txt = send_req(1)
        total_page = parse_total_page_ios(txt)
        if total_page:
            logging.info('total page: %s' % total_page)
            for page in range(1, total_page + 1):
                # crawl_page(total_page - page + 1)
                crawl_page(page)
            return True
        else:
            logging.error('无分页: %s' % txt)
            return False
    except Exception as e:
        logging.info('work exception: %s' % format_exc())
        _ = e


def work():
    """Run one crawl batch, retrying up to five more times on failure."""
    batch_no = time.strftime('%Y%m%d%H', time.localtime())
    logging.info('batch start[%s]: %s' % (batch_no, time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())))
    ok = crawl_batch()
    attempts_left = 5
    while not ok and attempts_left > 0:
        # Retry
        ok = crawl_batch()
        attempts_left -= 1
    logging.info('batch end[%s]: %s' % (batch_no, time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())))
    logging.info('-----------------------')


if __name__ == '__main__':
    # Run forever: crawl one full batch, then sleep 12 hours between runs.
    while True:
        work()
        time.sleep(3600 * 12)
