import http.cookiejar
import urllib.request
from urllib.parse import urlparse
from urllib.parse import ParseResult
from bs4 import BeautifulSoup
import  requests
from lxml import etree
# Login page URL.
url = "https://qcar.apiins.com/qcar/initQPQueryCond.do"

# Request headers sent with every request. NOTE(review): a hard-coded session
# Cookie header is sent alongside the cookie jar below — presumably copied from
# a live browser session; it will expire. Verify whether it is still needed.
header = {
    "User-Agent":"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36",
    "Host":"qcar.apiins.com",
    "Cookie":"QCARJSESSIONID=6LmbhwVhh2KSWnLfQ2SvfGjLrH8RGbvnlx1QYHXk1q7NkPLmw5NR!-247818315; BIGipServercar_qcar_poool=1174538924.16671.0000; chinainsuranceJSESSIONID=CjKLhwVccK1hfswjsPSsfzpTQGRbq1XcM4PJwSgYqJhGVx2xQsn2!1030164234; BIGipServercar_core_pool=100797100.17951.0000"
}

# Cookie jar that persists cookies to a local Mozilla-format file.
cookie = http.cookiejar.MozillaCookieJar("yatai.txt")

# One opener shared by all requests: the HTTPCookieProcessor records every
# response's cookies into the jar automatically, so there is no need to
# rebuild the handler/opener for each request (the original did, with the
# same jar — identical effect).
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cookie))


def _fetch(target_url):
    """GET *target_url* with the shared cookie-aware opener.

    Persists the jar to disk after the request and returns the raw
    response body as bytes.  The response is closed via the context
    manager (the original code leaked every response object).
    """
    request = urllib.request.Request(url=target_url, headers=header)
    with opener.open(request) as response:
        body = response.read()
    # ignore_expires=True: also save cookies that have already expired.
    # ignore_discard=True: also save session cookies marked "discard".
    cookie.save(ignore_expires=True, ignore_discard=True)
    return body


# 1) Hit the login/init page so the server issues session cookies.
content = _fetch(url)

# 2) Query the quote price by license plate (licenseNo is URL-encoded UTF-8).
search_url = "https://qcar.apiins.com/qcar/queryQuotePrice.do?queryType=query&licenseNo=%E7%B2%A4A4XU42"
content = _fetch(search_url)

# 3) Fetch the read-only quote detail page and parse it.
get_url = "https://qcar.apiins.com/qcar/quoteprice.do?preProposalTopNo=Z04020308000000201703069702&readonly=true"
content = _fetch(get_url)

soup = BeautifulSoup(content.decode('UTF8'), "html5lib")

print(soup)