import json
import os
import time
import urllib.parse
import urllib.request

import numpy as np
import pandas as pd
from bs4 import BeautifulSoup
def getData(name, user):
    """POST a company search to jst.sc.gov.cn and scrape the result table.

    Parameters
    ----------
    name : str
        Company name to search for (form field "mc").
    user : str
        Person name for form field "fr" (presumably legal representative
        — TODO confirm against the site); callers in this file pass "".

    Returns
    -------
    list[list[str]]
        One inner list per table row: the row's <th>/<td> texts plus a
        trailing element holding the part after "=" of the row's first
        link href (or "" when the row has no link).  Returns [] when the
        request fails or times out.
    """
    data = {
        "qylx": "",
        "mc": name,
        "xydm": "",
        "fr": user,
        "zsbh": "",
        "ctl00$MainContent$Button1": "搜索",
        # ASP.NET hidden fields captured from a live session; the server
        # rejects the POST without them.
        "__VIEWSTATE": "KdzTe++Cw2yelPAszrVmxNh4cq0qsndpJ8a5O0i8UjuHVrpUOksvWr+SMZAAVmvtqgehkxC7Sl8+8Ahm+yyTR8jeAhw2/SemzgVzYmKI0ooYkEbg",
        "__VIEWSTATEGENERATOR": "E0B88F17",
        "__EVENTVALIDATION": "WWhLB+IooyCBQmOpRhHX4Cj3TSrQkCID1JNy7faWD5eZnnKUG7keJk3hTFdpBoPYTY8wVGHx8GuQbKTvg7QVNePsTSM+MOyVTuMza3HyseWcDy77iWjBb54+vElINIavxBcuYbhQmEJWI6NZ",
    }
    last_data = urllib.parse.urlencode(data).encode("utf-8")
    req = urllib.request.Request("http://jst.sc.gov.cn/xxgx/Enterprise/eList.aspx", last_data)
    req.add_header("Referer", "http://jst.sc.gov.cn/xxgx/Enterprise/eList.aspx")
    req.add_header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
    req.add_header("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.4 Safari/605.1.15")
    req.add_header("Cookie", "Hm_lpvt_6647b45850e12bf42ce7ed42bd381746=1577321613; Hm_lvt_6647b45850e12bf42ce7ed42bd381746=1577067412,1577265703,1577321613")
    req.add_header("Content-Type", "application/x-www-form-urlencoded")
    try:
        # Was a bare `except:` — narrowed to what urlopen actually raises
        # (URLError and socket.timeout are both OSError subclasses).
        # `with` guarantees the response is closed (original leaked it).
        with urllib.request.urlopen(req, timeout=30) as f:
            html = f.read().decode("utf-8")
    except OSError:
        print(f"超时：{name}-{user}")
        time.sleep(1)
        return []
    time.sleep(1)  # throttle between requests
    soup = BeautifulSoup(html, features="html.parser")
    datas = []
    for tr in soup.select(".table-search-list tr"):
        # Header row contributes <th> cells, data rows contribute <td>.
        tds = [th.getText() for th in tr.findAll("th")]
        tds.extend(td.getText() for td in tr.findAll("td"))
        if tds:
            # Detail link href looks like "...?key=value"; keep the value.
            tds.append(tr.a["href"].split("=")[1] if tr.a is not None else "")
            datas.append(tds)
    return datas
    
def _fetch_record(name):
    """Fetch one company via getData, retrying until the request succeeds.

    getData returns [] on a failed/timed-out request (retry), a single
    header-only row when the search has no hits, and [header, data_row]
    on exactly one hit.  Returns the data row, or {} when there is no
    usable single hit.  NOTE: retries forever if the site stays down
    (matches the original recursive behavior, but without the
    RecursionError the recursion could hit).
    """
    while True:
        result = getData(name, "")
        if result:
            break
    return result[1] if len(result) == 2 else {}


def _save(filename, contents):
    """Overwrite `filename` with `contents` as UTF-8 text."""
    with open(filename, "w", encoding="utf-8") as fh:
        fh.write(contents)


def startLoad(start, fileIndex):
    """Resume scraping companies from the 四川列表 name list.

    Iterates rows of the list from index `start`, fetches each company
    with getData, and checkpoints hits to numbered JSON files
    (四川_{index}.txt) holding up to 10 records each.  The checkpoint
    file is rewritten after every fetch so progress survives a crash.

    Parameters
    ----------
    start : int
        Row index in the name list to resume from.
    fileIndex : int
        Number of the output file to resume into; pre-existing contents
        of that file are loaded and appended to.
    """
    with open("/Users/wangshuguan/Desktop/四川列表.txt", encoding="utf-8") as f:
        # List file is a JSON array of [name, user] pairs.
        df = pd.DataFrame(np.array(json.load(f)), columns=["name", "user"])

    datas = []
    index = fileIndex
    resume_path = f"/Users/wangshuguan/Desktop/四川/四川_{index}.txt"
    if os.path.isfile(resume_path):
        with open(resume_path, encoding="utf-8") as file:
            datas.extend(json.load(file))

    for x in range(start, len(df)):
        record = _fetch_record(df.name[x])
        if len(record) > 0:
            if len(datas) == 0:
                print(f"{index} start:{x}")
            datas.append(record)
        # Checkpoint after every row, hit or not.
        _save(f"/Users/wangshuguan/Desktop/四川/四川_{index}.txt",
              json.dumps(datas, ensure_ascii=False))
        print(f"{x}")
        if len(datas) >= 10:
            # Current file is full; roll over to the next one.
            index = index + 1
            datas = []
if __name__ == "__main__":
    # Resume the crawl at list row 21592, writing into output file 626.
    # Guarded so importing this module no longer kicks off the crawl.
    startLoad(21592, 626)