import requests
from lxml import etree
from bs4 import BeautifulSoup
import pandas as pd
import base64
# ---------------------------------------------------------------------------
# Scrape paginated job listings from a local web service into datas.csv.
#
# For every city code, list pages are requested until a page no longer
# contains the listing container markup.  Summary fields are parsed from the
# list page; three extra fields are fetched from a per-listing detail
# endpoint.  Everything is concatenated into one DataFrame and written out.
# ---------------------------------------------------------------------------

headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36'}

BASE_URL = "http://localhost:8080"

# City codes understood by the backend.
citys = ["749", "530", "538", "763", "765", "653"]  # 9580


def encode_token(text):
    """Return *text* base64-encoded as an ASCII string for use in a URL path.

    Fixes the original ``base64.encodestring`` hack: ``encodestring`` was
    removed in Python 3.9, and stripping every ``'b'`` character out of
    ``str(bytes)`` corrupted any base64 value that legitimately contained a
    ``b`` (e.g. ``encode_token("1-alice") == "MS1hbGljZQ=="``).
    ``b64encode`` emits no trailing newline, so no character stripping is
    needed at all.
    """
    return base64.b64encode(text.encode("utf-8")).decode("ascii")


def _fetch_detail(position_id):
    """Fetch the detail page for *position_id* and return its three detail
    fields as a list of stripped strings.

    The first ``layui-card-body`` div is skipped (header block, as in the
    original ``mark`` logic); missing trailing fields come back as empty
    strings instead of raising IndexError.
    """
    url = BASE_URL + "/getPosition?id=" + encode_token(position_id)
    resp = requests.get(url, headers=headers)
    soup = BeautifulSoup(resp.text, 'lxml')
    cards = [div.text.strip()
             for div in soup.find_all("div", {"class": "layui-card-body"})]
    # Skip the header card, keep the next three; pad so callers can always
    # index [0..2] even on malformed pages.
    return (cards[1:4] + ["", "", ""])[:3]


def _parse_page(html):
    """Parse one listing page into a DataFrame (one column set per field,
    transposed so each row is one listing)."""
    soup = BeautifulSoup(html, 'lxml')

    # Listing titles: the <a> tags inside the legend headers.
    title = [a.text
             for legend in soup.find_all("legend", {"class": "center-to-head"})
             for a in legend.find_all("a")]

    # Per-row summary fields.  Field meanings are best guesses from the
    # original variable names (nian = year, gs = company, guim = scale) —
    # TODO confirm against the page markup.  Note the trailing space in
    # "layui-field-box " is intentional: it matches the exact class
    # attribute string of the source HTML, as the original code did.
    xs, cs, nian, xve, gs, guim = [], [], [], [], [], []
    for fieldset in soup.find_all(
            "fieldset", {"class": "layui-elem-field layui-field-title"}):
        for box in fieldset.find_all("div", {"class": "layui-field-box "}):
            for row in box.find_all("div", {"class": "layui-row layui-hide"}):
                parts = row.text.replace("\n", "$").split("$")
                xs.append(parts[1])
                cs.append(parts[2])
                nian.append(parts[4])
                xve.append(parts[5])
                gs.append(parts[7])
                guim.append(parts[9])

    # Posting timestamps.
    times = [span.text for span in soup.find_all("span", {"class": "dtime"})]

    # Detail fields, fetched from the per-listing endpoint via the id on
    # each legend's <a> tag.
    fl, yq, dd = [], [], []
    for fieldset in soup.find_all(
            "fieldset", {"class": "layui-elem-field layui-field-title"}):
        for legend in fieldset.find_all("legend", {"class": "center-to-head"}):
            for a in legend.find_all("a"):
                detail = _fetch_detail(a["id"])
                fl.append(detail[0])
                yq.append(detail[1])
                dd.append(detail[2])

    return pd.DataFrame([title, xs, cs, nian, xve, gs, guim,
                         fl, times, yq, dd]).T


def main():
    """Crawl every city/page combination and write the result to datas.csv."""
    datas = pd.DataFrame()
    for city_code in citys:
        for page in range(1, 3):
            token = encode_token(str(page) + "-alice")
            resp = requests.get(
                BASE_URL + "/page/" + token + "/12/" + city_code,
                headers=headers)
            # The marker class only appears on pages that contain listings;
            # its absence means we ran past the last page for this city.
            if "layui-collapse layui-panel layui-article" not in resp.text:
                break
            datas = pd.concat([datas, _parse_page(resp.text)])
            print(datas)
    datas.to_csv("datas.csv", index=None)


if __name__ == "__main__":
    main()

# et=etree.HTML(a.text)
# d=et.xpath("//fieldset[@class='layui-elem-field layui-field-title']/legend[@class='layui-field-box ']/layui-row/text()")
# print(d)