# -*- coding: utf-8 -*-
import json
import logging
import os
import sys
import time
from datetime import timedelta, datetime

import requests
from lxml import etree

# add the project root (five directory levels up) to sys.path

sys.path.append(os.path.abspath(os.path.dirname(os.path.dirname(
    os.path.dirname(os.path.dirname(os.path.dirname(__file__)))))))
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'

from common.lottery.cyclical.xj_ssc.db.activity import fullfill_result
from common.lottery import LOTTERY_TYPE, OPENCAI_LOTTERY_TYPES
from common.timer import TIMER_EVENT_TYPE, TimerEvent
from common.utils import tz
from common.lottery.handler import crawl_from_opencai

# Internal HTTP proxy used to reach the lottery APIs.
PROXY_URL = 'http://103.230.243.40:5000/proxy'
# 168 API daily draw list; %s is the date (YYYY-MM-DD).
# lotCode=10004 presumably selects xj_ssc — TODO confirm against API docs.
URL = 'http://api.1680210.com/CQShiCai/getBaseCQShiCaiList.do?date=%s&lotCode=10004'

# 500.com static XML feed; %s is the date formatted as YYYYMMDD.
URL_500 = 'http://kaijiang.500.com/static/info/kaijiang/xml/xjssc/%s.xml'


def _get_default_date():
    """Return the local date whose draws should be crawled.

    Within the first 3 minutes after local midnight the last few draws
    of the previous day may still be missing, so yesterday is returned
    instead of today.
    """
    local_now = tz.utc_to_local(datetime.utcnow())
    just_after_midnight = local_now.hour == 0 and local_now.minute < 3
    if just_after_midnight:
        return local_now - timedelta(days=1)
    return local_now


def _parse_and_fill_168(data, refer):
    if not isinstance(data, dict) or data["errorCode"] != 0:
        print 'crawl xj_ssc error', data
        return False
    for d in data['result']['data']:
        phase = str(d['preDrawIssue'])
        number = d['preDrawCode']
        number = ''.join(number.split(','))
        if not number:
            print 'xj_ssc {} number is blank!'.format(phase)
            continue
        activity = fullfill_result(phase, number, refer)
        if activity:
            TimerEvent.submit(TIMER_EVENT_TYPE.CALC_STATS, {
                'number': number, 'term': phase,
                'activity_type': LOTTERY_TYPE.XJ_SSC}, tz.now_ts())
            TimerEvent.submit(TIMER_EVENT_TYPE.ACTIVITY_ANNOUNCE, {
                'number': number, 'term': phase,
                'activity_type': LOTTERY_TYPE.XJ_SSC}, tz.now_ts())

    return True


def crawl_history_from_168(date):
    """Crawl one day's historical xj_ssc draws from the 168 API via proxy.

    Args:
        date: datetime/date whose draws should be fetched.

    Raises:
        RuntimeError: when the proxy answers with a non-200 status.
        requests.RequestException / ValueError: network or JSON-decoding
            errors propagate to the caller unchanged.
    """
    url = URL % date.strftime('%Y-%m-%d')
    # The original `except Exception as e: raise e` was a no-op re-raise;
    # just let errors propagate.
    response = requests.post(PROXY_URL, data=json.dumps({'target': url}),
                             timeout=5)
    # Check the status before decoding: a non-200 answer usually carries
    # a non-JSON error page.
    if response.status_code != 200:
        raise RuntimeError('request error')
    _parse_and_fill_168(json.loads(response.text), url)


''' Run from cron daily at 01:00 Beijing time to crawl the previous day
'''


def crawl_newest_from_168(base=URL):
    """Crawl the newest xj_ssc draws for today from the 168 API via proxy.

    Args:
        base: URL to fetch; defaults to the official API template, in
            which case today's local date is substituted in.

    Returns:
        True on success, False on any network, HTTP or decoding failure.
    """
    now = tz.local_now()
    refer = URL % now.strftime('%Y-%m-%d')
    url = refer if base == URL else base
    try:
        response = requests.post(PROXY_URL, data=json.dumps({'target': url}),
                                 timeout=5)
    except Exception:
        # Best-effort crawl: keep returning False, but log instead of
        # silently swallowing the error.
        logging.exception('crawl_newest_from_168 request failed: %s', url)
        return False
    # Reject non-200 answers before decoding: error pages are not JSON.
    if response.status_code != 200:
        return False
    try:
        payload = json.loads(response.text)
    except ValueError:
        logging.exception('crawl_newest_from_168 bad JSON from %s', url)
        return False
    return _parse_and_fill_168(payload, refer)


def _parse_and_fill_500(content, refer):
    """Parse 500.com's XML draw list and persist each draw result.

    Args:
        content: raw XML; each child node is expected to carry ``expect``
            (issue id), ``opentime`` and ``opencode`` attributes.
        refer: source URL recorded alongside each stored result.
    """
    nodes = etree.fromstring(content)
    for node in nodes:
        attrib = node.attrib
        expect = attrib['expect']
        opentime = attrib['opentime']
        # Normalize the issue id to a 10-character phase.
        # NOTE(review): the upstream formats aren't visible here —
        # presumably >10 chars has a separator at index 8 that is dropped,
        # exactly 10 is already normalized, and shorter ids need the year
        # prefixed from opentime.  Confirm against live feed samples.
        if len(expect) > 10:
            phase = expect[:8] + expect[9:]
        elif len(expect) == 10:
            phase = expect
        else:
            phase = opentime[:4] + expect[:4] + expect[-2:]
        # opencode is comma-separated digits, e.g. "1,2,3,4,5" -> "12345"
        number = attrib['opencode'].replace(',', '')
        activity = fullfill_result(phase, number, refer)
        if activity:
            # Newly stored result: schedule stats recalculation and the
            # announcement event for this term.
            TimerEvent.submit(TIMER_EVENT_TYPE.CALC_STATS, {
                'number': number, 'term': phase,
                'activity_type': LOTTERY_TYPE.XJ_SSC}, tz.now_ts())
            TimerEvent.submit(TIMER_EVENT_TYPE.ACTIVITY_ANNOUNCE, {
                'number': number, 'term': phase,
                'activity_type': LOTTERY_TYPE.XJ_SSC}, tz.now_ts())


def crawl_history_from_500(date):
    """Crawl one day's historical xj_ssc draws from 500.com's XML feed.

    Args:
        date: datetime/date whose draws should be fetched.

    Raises:
        RuntimeError: when the server answers with a non-200 status.
        requests.RequestException: network errors propagate unchanged.
    """
    url = URL_500 % date.strftime('%Y%m%d')
    # The original `except Exception as e: raise e` was a no-op re-raise;
    # just let errors propagate.
    response = requests.get(url, timeout=10)
    if response.status_code != 200:
        raise RuntimeError('request error')
    _parse_and_fill_500(response.content, url)


def crawl_newest_from_500(now=None):
    """Crawl today's xj_ssc draws from 500.com's XML feed.

    Args:
        now: optional local datetime; defaults to the current local time.

    Returns:
        ``now`` when the request failed or got a non-200 answer (so the
        caller can retry with the same date); implicitly None on success.
    """
    if now is None:
        now = tz.local_now()
    url = URL_500 % now.strftime('%Y%m%d')
    try:
        response = requests.get(url, timeout=5)
    except Exception:
        return now
    if response.status_code == 200:
        _parse_and_fill_500(response.content, url)
    else:
        return now


def main(d):
    """Crawl-loop entry point.

    Args:
        d: truthy -> crawl once and return; falsy -> loop forever,
            crawling every 60 seconds.
    """
    # Hoist the invariant name lookup out of the loop.
    lottery_name = OPENCAI_LOTTERY_TYPES.get(LOTTERY_TYPE.XJ_SSC)
    while True:
        try:
            crawl_from_opencai(LOTTERY_TYPE.XJ_SSC)
        except Exception:
            # Lazy %-args instead of eager string building; exception()
            # already appends the traceback, so the message needs no `e`.
            logging.exception(u'%s error', lottery_name)
        if d:
            break
        time.sleep(60)


if __name__ == '__main__':
    # NOTE(review): argv[1] is named `date` but main() only uses it as a
    # run-once flag (any truthy value breaks the loop) — the value itself
    # is never parsed as a date; confirm whether that is intended.
    date = sys.argv[1] if len(sys.argv) > 1 else None
    main(date)
