import requests, random
from lxml import etree
import MySQLdb

# Connect to the MySQL database.
# NOTE(review): credentials are hard-coded — move to env vars / config before sharing.
conn = MySQLdb.connect(
    host="localhost",  # host IP where MySQL runs
    port=3306,  # MySQL port
    user="root",  # MySQL user name
    password="123456",  # MySQL password
    db="reptiles",  # database (schema) to use
    charset="utf8",  # connection character set; "utf8mb4" would cover full Unicode — TODO confirm table charset

)
cursor = conn.cursor(MySQLdb.cursors.DictCursor)  # cursor that returns rows as dicts (full column info)

"""
通过尝试我们发现在网页内部的网页地址不会变,但有个东西会变 网页下一页所包含的点击事件里面的handler/guid的值会变,每次加4,动态更新
"""

# 首先获取百度搜索页面的真实地址(也许不同)
url = "https://www.baidu.com/s?ie=UTF-8&wd=%E5%A4%B1%E4%BF%A1%E4%BA%BA"  # 真实地址

# 爬取源码结果不对(和之前遭遇的情况不同),尝试伪装user-agent试试,测试结果,果然是这个原因
user_agent = [
    "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
    "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0;",
    "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11",
    "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Maxthon 2.0)",
    "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)"
]
user_agent = random.choice(user_agent)

cookie = "BAIDUID=64F212BEF1C818A020A097E0AF99A567:FG=1; BIDUPSID=64F212BEF1C818A020A097E0AF99A567; PSTM=1588495682; BD_UPN=19314753; BDUSS=0RtY0F4aUtoc3lHc3J2c0dkTWlzQ0ROLXRIaWZKMXo3MHJUWjRtU0JtWlFZU1ZmRVFBQUFBJCQAAAAAAAAAAAEAAADeYvzqAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFDU~V5Q1P1eM; MCITY=-256%3A; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; delPer=0; BD_CK_SAM=1; PSINO=6; __yjsv5_shitong=1.0_7_8b2e8ce73b4dc174bc3fc7fbc30c9c2df4d8_300_1595935340884_171.215.70.147_6702a6fa; yjs_js_security_passport=2d472fe727a718fedd60149cc41ad1d94ddd53e3_1595935341_js; BD_HOME=1; BDRCVFR[feWj1Vr5u3D]=mk3SLVN4HKm; sug=3; sugstore=0; ORIGIN=2; bdime=0; COOKIE_SESSION=24_0_9_9_0_0_0_0_9_0_0_0_0_0_0_0_0_0_1595937566%7C9%23137954_248_1595813707%7C9; H_PS_PSSID=32294_1458_32361_31660_32045_32398_32405_31525_32116_31709; H_PS_645EC=d83fhSkb8%2B5kleIGqgQ%2BY1z%2F6I22wr7P115wvtFatAxMfWEHbs1pl06KVx3hKQATKYDl; BDSVRTM=0"
res = requests.get(url, headers={"user-agent": user_agent}).text

# The complete page HTML is now in `res`; build an element tree and pull out
# the fields of interest with XPath.
ele = etree.HTML(res)    # lxml Element for XPath matching


def _info_row_text(row):
    """Return the text nodes of column 2 in row `row` of the op_trust_info table."""
    return ele.xpath(f"//div[@class='op_trust_info']//tr[{row}]/td[2]//text()")


# Name of the listed person
name = ele.xpath("//span[@class='op_trust_name']/text()")
# ID-card number (already masked on the page itself, so left as-is for now)
id_card = ele.xpath("//span[@class='op_trust_fl op_trust_papers']/text()")
# Executing court
court_execution = _info_row_text(1)
# Province — currently not retrievable; the page injects extra markup here
province = _info_row_text(2)
# Case number
case_number = _info_row_text(3)
# Obligation fixed by the effective legal instrument
law_obligation = _info_row_text(4)
# Debtor's performance status (data looks unreliable — TODO confirm)
perform = _info_row_text(5)
# Concrete circumstances of the dishonest behaviour
behavior = _info_row_text(6)
# Publication date
release_time = _info_row_text(7)

# Pack the scraped columns into per-record rows and insert them into MySQL.
print(release_time)
print("----------------")

sql = "insert into baidu_dishonesty values (%s,%s,%s,%s,%s,%s)"
# Build each row as a plain tuple of the six columns the table expects.
# Fixes two bugs in the original loop:
#  * it joined all fields on "," and split again, so any field containing a
#    comma (e.g. the behaviour text) produced >6 params and broke the INSERT —
#    and it also shadowed the builtin `list`;
#  * it hard-coded range(10), raising IndexError when Baidu returned fewer
#    than 10 results. zip() naturally stops at the shortest column list.
for row in zip(name, id_card, court_execution, case_number, behavior, release_time):
    print(row)
    cursor.execute(sql, row)  # parameterized query — no manual escaping needed
conn.commit()  # single commit for the whole batch instead of one per row
cursor.close()
conn.close()


