import requests
import json
import pandas as pd
import time
import random

max_retries = 5
contentlist = []  # one row per licence record: [page, id, flowNo, certCode, fullName, setDate, printDate, useState]

# CBIRC insurance-licence query endpoint (the commented-out '/jr/' URL in the
# original was the bank variant; this script scrapes the insurance one).
# NOTE(review): adjust the page range below and the output filename at the end
# of the script together when scraping a different slice.
URL = 'https://xkz.cbirc.gov.cn/bx/OPtJLM/getLicence.do?'
HEADERS = {
    'Connection': 'keep-alive',
    'Accept': '*/*',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    # Content-Length is intentionally NOT set by hand: the form payload length
    # varies with the page offset (the hard-coded '16' was wrong for every
    # request in this range), and requests computes the correct value itself.
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Host': 'xkz.cbirc.gov.cn',
    'X-Requested-With': 'XMLHttpRequest',
    'Origin': 'https://xkz.cbirc.gov.cn',
    'Referer': 'https://xkz.cbirc.gov.cn/jr/',
    'Sec-Ch-Ua': '"Google Chrome";v="113", "Chromium";v="113", "Not-A.Brand";v="24"',
    'Sec-Ch-Ua-Mobile': '?0',
    'Sec-Ch-Ua-Platform': '"Windows"',
    'Sec-Fetch-Dest': 'empty',
    'Sec-Fetch-Mode': 'cors',
    'Sec-Fetch-Site': 'same-origin',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36',
    'Cookie': 'isClick=true; yfx_c_g_u_id_10006849=_ck22102109245218135713153187778; yfx_mr_10006849=%3A%3Amarket_type_free_search%3A%3A%3A%3Abaid'
              'u%3A%3A%3A%3A%3A%3A%3A%3Awww.baidu.com%3A%3A%3A%3Apmf_from_free_search; yfx_mr_f_10006849=%3A%3Amarket_type_free_search%3A%3A%3A%3Abaidu'
              '%3A%3A%3A%3A%3A%3A%3A%3Awww.baidu.com%3A%3A%3A%3Apmf_from_free_search; yfx_key_10006849=; yfx_f_l_v_t_10006849=f_t_1632801034370__r_t_1684897'
              '627362__v_t_1684897627362__r_c_5; JSESSIONID=0000-utXDNuzSbob1hLgncqXC5a:-1',
}


def _query_page(q):
    """POST one page of licence records (10 per page) and return the parsed
    JSON response dict.

    Raises requests.RequestException on network failure and ValueError if the
    response body is not valid JSON.
    """
    payload = {
        'useState': 3,    # licence state: 3 = held, 7 = exited, 9 = out of control, 1 = recently established
        'start': q * 10,  # absolute record offset (server pages by record index)
        'limit': 10,      # records per page
    }
    response = requests.post(URL, data=payload, headers=HEADERS, timeout=10)
    return response.json()


for q in range(3001, 4000):  # page range for this run (earlier runs covered 2100-9900)
    retries = 0
    while True:
        try:
            result = _query_page(q)
            print(result)
            print(q, result['success'])
            if result['success']:
                for rec in result['datas']:
                    contentlist.append([
                        q,
                        rec['id'],        # unique institution id
                        rec['flowNo'],    # licence serial number
                        rec['certCode'],  # institution code
                        rec['fullName'],  # full institution name
                        rec['setDate'],   # establishment date
                        rec['date'],      # reissue or exit date
                        rec['useState'],  # state: 3 = normal, 7 = exited, 9 = out of control
                    ])
                # Government site: throttle to roughly one page every 7-10 s.
                time.sleep(random.uniform(7, 10))
                break
            # Server answered but reported failure -> fall through and retry.
        except (requests.RequestException, ValueError, KeyError):
            # Narrower than the original bare `except:` so Ctrl-C still exits;
            # covers network errors, invalid JSON, and missing response keys.
            pass
        retries += 1
        time.sleep(random.uniform(7, 10))
        if retries >= max_retries:
            print('请求多次未果，休息320秒并继续')
            time.sleep(320)
            # Reset the counter so the long back-off can fire again on a
            # persistent failure (the original only ever triggered it once,
            # because retries kept climbing past max_retries).
            retries = 0

# Dump every collected row into a spreadsheet. Rename the output file in step
# with the page range scraped above (e.g. the 700-774 "exited" run).
columns = ['page', 'id', 'flowNo', 'certCode', 'fullName', 'setDate', 'printDate', 'useState']
pd.DataFrame(contentlist, columns=columns).to_excel(r"失控3001-4000.xlsx")
print('爬取成功')