# import urllib.request
# # from urllib import request
# from urllib import error
#
# try:
#     response = urllib.request.urlopen("http://www.baidu.com")
# except error.HTTPError as e:
#     print(e.reason, e.code, e.headers, sep="\r\n")
# except error.URLError as e:
#     print(e.reason)
# else:
#     print(response.read())
# request = urllib.request.Request("http://www.baidu.com")
# response = urllib.request.urlopen(request)
# print(response.read())

# from urllib.parse import parse_qs, parse_qsl, quote, unquote, quote_plus

# query = "k1=v1&k2=v2"
# print(parse_qs(query))
# print(parse_qsl(query))

# key = "美  女"
# url_coding = quote(key)
# url_coding_plus = quote_plus(key)
# print(url_coding)
# print(url_coding_plus)
# origin = unquote(url_coding)
# print(origin)

# from urllib.robotparser import RobotFileParser
# rp = RobotFileParser()
# rp.set_url("http://www.baidu.com/robots.txt")
# rp.read()
# print(rp.can_fetch("*", "http://www.baidu.com/xxx"))

import requests
# s = requests.Session()
# s.get("http://httpbin.org/cookies/set/num/1234")  # set a cookie
# resp = s.get("http://httpbin.org/cookies")
# print(resp.text)
# print(requests.utils.dict_from_cookiejar(resp.cookies))

# from requests import urllib3
# urllib3.disable_warnings()
# resp = requests.get("https://www.12306.cn", verify=False)
# print(resp.status_code)
# import logging
# logging.captureWarnings(True)
# resp = requests.get("https://www.12306.cn", verify=False)
# print(resp.status_code)
from requests import Request, Session

# Demonstrates requests' "prepared request" workflow: build a Request
# object, let the Session prepare it (merging in session-level state
# such as cookies), then send the prepared object explicitly.
url = "http://xxx"  # placeholder host — replace with a real endpoint before running
data = {
    "name": "data"
}
headers = {
    "User-Agent": "yyy"
}

# Use the session as a context manager so the underlying connection
# pool is released even if send() raises.
with Session() as s:
    req = Request("POST", url, data=data, headers=headers)
    prep = s.prepare_request(req)
    # requests has NO default timeout — without one, a dead host hangs
    # the script forever.
    resp = s.send(prep, timeout=10)
    print(resp.text)
