import requests
from pyquery import PyQuery as pq
import time
import pymysql

# Root URL of the crawl target; image/page paths from the HTML are relative to it.
base_url = 'http://59.202.29.142'

# MySQL connection settings.
# NOTE(review): credentials are hard-coded in source — consider moving them
# to environment variables or a config file before sharing this script.
host = 'localhost'
user = 'root'
password = 'root'
port = 3306
db_name = 'sment'
# Module-level connection and cursor, opened at import time and shared by
# save_to_mysql() and closed in main().
db = pymysql.connect(host=host, user=user,
                     password=password, port=port, db=db_name)
cursor = db.cursor()


def download_check_code(session):
    """Fetch the login page and save its captcha image to check_img.png.

    The image must be downloaded through the same *session* so the captcha
    stays bound to this session's cookies; a fresh request would be issued
    a different captcha.

    :session: requests session used for the whole login flow
    :returns: None (side effect: writes check_img.png in the working dir)
    """
    login_url = 'http://59.202.29.142/sment/login/doEnlogin'
    page = session.get(login_url)
    # Pull the captcha <img> src path out of the login page markup.
    img_path = pq(page.text)('img.js-check-img').attr('src')
    img_url = base_url + img_path
    print('验证码图片路径为:', img_url)
    # Deliberately NOT urllib.request.urlretrieve(): that would be a second,
    # cookie-less request and the captcha would no longer match this session.
    img_resp = session.get(img_url)
    with open('check_img.png', 'wb') as out:
        out.write(img_resp.content)


def get_token():
    """Generate the `_t` request token: current Unix time in milliseconds.

    :returns: 13-digit millisecond timestamp as a string
    """
    # round() on a float already returns an int in Python 3, so the extra
    # int() wrapper the original had was redundant.
    return str(round(time.time() * 1000))


def login(session):
    """Interactive login: fetch the captcha, prompt for it, POST credentials.

    :session: requests session that will carry the auth cookies afterwards
    :returns: None (prints the server's JSON reply)
    """
    download_check_code(session)
    # Operator opens check_img.png and types the captcha in by hand.
    check_code = input('输入验证码:')
    print(check_code)
    # The endpoint expects a 13-digit millisecond timestamp as `_t`.
    token = get_token()
    endpoint = 'http://59.202.29.142/sment/login/doLogin'
    do_login = '{}?{}{}'.format(endpoint, '_t=', token)
    print(do_login)

    form = {
        'username':	'loujiaoting',
        'password':	'1',
        'checkCode': check_code
    }

    reply = session.post(do_login, data=form)
    print(reply.json())


#  def get_data(session):
    #  """ Get data
#
    #  :session: TODO
    #  :returns: TODO
#
    #  """
#

def get_page(session, start, length, year, regOrg=''):
    """POST one page of the yearly enterprise search and return its JSON.

    :session: logged-in requests session
    :start: zero-based record offset
    :length: page size (number of records per request)
    :year: annual-report year to query
    :regOrg: registration-office code filter ('' = whole province)
    :returns: parsed JSON dict on HTTP 200, otherwise None
    """
    data = {
        'start': str(start),
        'length': str(length),
        'search[value]': '',
        'search[regex]': 'false',
        'params[smYear]': str(year),
        'params[regState]': 'A,B,K,Q,X',
        'params[entType]': '',
        'params[industryCo]': '',
        'params[estDateStart]': '',
        'params[estDateEnd]': '',
        'params[cidRegNO]': '',
        'params[entName]': '',
        'params[leRep]': '',
        'params[regCapStart]': '',
        'params[regCapEnd]': '',
        'params[currency]': '156',
        'params[jgname]': '',
        'params[dom]': '',
        'params[regOrg]': regOrg,
        'params[localAdm]': '',
        'params[opScope]': '',
        'params[assGroStart]': '',
        'params[assGroEnd]': '',
        'params[maiBusIncStart]': '',
        'params[maiBusIncEnd]': '',
        'params[tscode]': '',
        'params[empNumStart]': '',
        'params[empNumEnd]': '',
        'params[shareholders]': '',
        'params[ratGroStart]': '',
        'params[ratGroEnd]': '',
        'params[proGroStart]': '',
        'params[proGroEnd]': '',
        'params[zbxchoose]': '25',
        'params[industryType]': ''
    }
    url = 'http://59.202.29.142/sment/smbaseinfoyr/selectYrSearchList'
    # BUGFIX: the old version used backslash line continuations INSIDE the
    # string literals, which leaked the source indentation into the header
    # values (the Referer was not even a valid URL). Adjacent-literal
    # concatenation keeps the values clean.
    ajax_headers = {
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'Accept-Language': ('zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,'
                            'en-US;q=0.3,en;q=0.2'),
        'Connection': 'keep-alive',
        'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
        'Host': '59.202.29.142',
        'Referer': 'http://59.202.29.142/sment/smbaseinfoyr/toYrSearchListPage',
        'user-agent': ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/70.0.3538.102 Safari/537.36'),
        'X-Requested-With': 'XMLHttpRequest'
    }
    try:
        resp = session.post(url, data=data, headers=ajax_headers)
        if resp.status_code == 200:
            return resp.json()
        # Surface unexpected statuses instead of returning None silently.
        print('Error: unexpected status code', resp.status_code)
    except requests.RequestException as e:
        # RequestException covers ConnectionError, Timeout, etc., so one
        # flaky request no longer aborts the whole crawl.
        print('Error:', e.args)
    return None


def parse_page(json):
    """Yield one flat record dict per enterprise entry in *json*.

    :json: response dict from get_page() (may be None/falsy)
    :returns: generator of dicts with the fixed field set below; fields
        absent from an entry come through as None
    """
    if not json:
        return
    fields = ('cerNO', 'country', 'dom', 'entName', 'entTypeName',
              'estDate', 'leRep', 'localAdmName', 'regNO', 'regOrgName',
              'regState', 'smYear', 'tel')
    for record in json.get('data'):
        yield {field: record.get(field) for field in fields}


def save_to_mysql(item, count):
    """Insert one enterprise record into the `sment` table.

    Relies on the module-level `cursor` and `db` connection; commits on
    success and rolls back on any failure.

    :item: dict mapping column name -> value. Keys are trusted internal
        field names from parse_page() and are interpolated into the SQL;
        only the values go through the driver's %s placeholders.
    :count: running record counter, used only for progress output
    :returns: None
    """
    table = 'sment'
    keys = ', '.join(item.keys())
    values = ', '.join(['%s'] * len(item))
    sql = 'insert into {table}({keys}) values({values})'.format(
        table=table, keys=keys, values=values)
    try:
        if cursor.execute(sql, tuple(item.values())):
            # BUGFIX: message typo corrected ('sucessfully').
            print(str(count), '- Saved to mysql successfully!')
            db.commit()
    except Exception as e:
        # Best-effort crawl: log, roll back this row, keep going.
        print('Failed:', e.args)
        db.rollback()


# Timing notes — 2017, page size 150: 11,234 pages total, 1,685,025 records
# page size 1000: 1m5s
# page size 10000: 1m44s
# page size 100,000: 6m30s
# Records / pages per year (page size 10000):
# 2017 - 1,685,025 - 169 pages
# 2016 - 1,395,504 - 140
# 2015 - 1,169,699 - 117
# 2014 -  990,011 - 100
# 2013 -

# Yiwu office only (regOrg 330782): records, pages
# 2017 - 84,694, 9
# 2016 - 60,953, 7
# 2015 - 42,161, 5
# 2014 - 32,527, 4

def main():
    """Log in, then crawl yearly enterprise pages and store every record.

    Iterates over hard-coded (year, page-count) pairs for the Yiwu
    registration office and inserts each parsed record into MySQL,
    closing the module-level connection when done.

    :returns: None
    """
    # BUGFIX: the old backslash continuation sat INSIDE the string literal,
    # so the User-Agent header contained the source file's indentation.
    headers = {
        'user-agent': ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/70.0.3538.102 Safari/537.36')
    }
    session = requests.session()
    session.headers = headers

    login(session)
    # Full-province page counts for reference:
    #  years = [[2017, 169], [2016, 139], [2015, 116], [2014, 99]]
    years = [[2017, 9], [2016, 7], [2015, 5], [2014, 4]]
    length = 10000  # records per request; the site default is 150
    regOrg = '330782'  # Yiwu registration office
    tmp_count = 0
    for year, pages in years:
        for page in range(0, pages):
            start = length * page
            # Renamed from `json` to avoid shadowing the stdlib module name.
            page_json = get_page(session, start, length, year, regOrg)
            for result in parse_page(page_json):
                tmp_count += 1
                save_to_mysql(result, tmp_count)
    db.close()


if __name__ == "__main__":
    main()
