#coding:utf-8

from urllib import request
from urllib import parse
import json
import pymysql
from bs4 import BeautifulSoup

# Open the database connection.
# NOTE: keyword arguments are used because positional arguments to
# pymysql.connect() are deprecated and easy to mis-order.
db = pymysql.connect(host="localhost", user="root",
                     password="root", database="zgcl")

# Create a cursor for the batch query.
cursor = db.cursor()
# Batch of rows to process in this run (ids 701..1000).
sql = "SELECT * FROM move4 WHERE id>700 AND id<=1000"

# idCard -> area code to move the record to
idCard_area = {}
# idCard -> primary-key id of the row (used only for progress logging later)
idCard_id = {}

try:
    # Execute the query and index every row by its ID-card number.
    cursor.execute(sql)
    for row in cursor.fetchall():
        # Row layout assumed from the indexing: (id, areaCode, idCard, ...)
        # — TODO confirm against the move4 table schema ("SELECT *" hides it).
        row_id = row[0]       # renamed from `id`: don't shadow the builtin
        area_code = row[1]
        id_card = row[2]

        idCard_area[id_card] = area_code
        idCard_id[id_card] = row_id
except Exception as e:
    # Best-effort: log the DB error and continue with whatever was loaded
    # (possibly empty dicts), matching the script's original behavior.
    print(e)
finally:
    # Release DB resources — neither is used after this point.
    cursor.close()
    db.close()

# HTTP headers for every call to the CDPF service; mimics a desktop Chrome
# browser submitting a URL-encoded form. Built from an ordered pair list so
# the header order is explicit and stable.
_header_pairs = [
    ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"),
    ("Accept-Language", "zh-CN,zh;q=0.9"),
    ("Cache-Control", "max-age=0"),
    ("Connection", "keep-alive"),
    ("Content-Type", "application/x-www-form-urlencoded"),
    ("Host", "service.cdpf.org.cn"),
    ("Origin", "https://service.cdpf.org.cn"),
    ("Referer", "https://service.cdpf.org.cn/api?method=zclXzsp.cjrgl.list"),
    ("Upgrade-Insecure-Requests", "1"),
    ("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36"),
]
headers = dict(_header_pairs)

# Session cookie captured from a logged-in browser session — presumably
# expires; refresh before each run (TODO confirm).
cookie_str = r'JSESSIONID=8A66F353F09D257EB89D4B4BAFE39C1F; SERVERID=cjrfwptsr5; _tj_token_id_=e679d657911d4eb6932f74b9c4606358-20180708204618'

# Endpoint used to search for a person by ID-card number.
url2 = 'https://service.cdpf.org.cn/api?method=zclXzsp.cjrgl.wfplist'

# ID cards for which no record was found (printed at the end of the run).
log_list = []

# For every ID card loaded from the DB: look the person up on the remote
# service, extract the server-side record id, then issue a "move" request.
for idCard in idCard_area:

    print(str(idCard_id.get(idCard))+"------开始")

    # Form payload for the "wfplist" search — looks the person up by
    # ID-card number; the other fields are sent empty as the site expects.
    search_form = {"catId": "",
                   "deptId": "",
                   "list_sort": "",
                   "userId": "4e74f6e426a14654977937aaa9a1b027",
                   "areaId": "",
                   "areaName": "",
                   "wfm": "cjr",
                   "search.name": "",
                   "search.idcard": idCard,
                   "search.domicile_area": "",
                   "idcard": idCard,
                   "LIST_PAGE_SIZE": 10}

    data = parse.urlencode(search_form).encode('utf-8')
    req2 = request.Request(url2, headers=headers, data=data)
    req2.add_header('cookie', cookie_str)
    # `with` closes the HTTP response deterministically (the original
    # leaked the socket on every iteration).
    with request.urlopen(req2) as resp:
        page = resp.read().decode('utf-8')

    soup = BeautifulSoup(page, features='html.parser')
    # Renamed from `id` so the builtin is not shadowed.
    uid = ''
    try:
        # First data row of the result table (row 0 is the header);
        # the hidden <input class="uid"> carries the server-side record id
        # that the move endpoint needs.
        uid = (soup.find(id="list")
                   .find_all('tr')[1]
                   .find_all('td')[0]
                   .find_all('input', class_='uid')[0]
                   .attrs.get('value'))
    except (AttributeError, IndexError):
        # AttributeError: find() returned None (no "list" element);
        # IndexError: the table had no data row / cell / input.
        print("错误"+str(idCard))


    ############################### move ##############################################
    if str(uid).strip() == '':
        # Nothing found for this ID card — remember it for the final report.
        log_list.append(idCard)
    else:
        move_url = 'https://service.cdpf.org.cn/api?method=zclXzsp.cjrgl.saveMoveCjr'
        # Move the found record (`ids`) to the area loaded from the DB.
        move_form = {
            "userId": "4e74f6e426a14654977937aaa9a1b027",
            "ids": uid,
            "areaId": idCard_area.get(idCard),
            "wfm": 'wfp'
        }
        move_data = parse.urlencode(move_form).encode('utf-8')
        req_move = request.Request(move_url, headers=headers, data=move_data)
        req_move.add_header('cookie', cookie_str)

        with request.urlopen(req_move) as resp:
            response = resp.read().decode('utf-8')
    #print(response)
print('-----输出错误列表--')
print(log_list)