import random

import requests
from DrissionPage import ChromiumPage
from DrissionPage.common import Keys
import time
import mysql.connector
from bs4 import BeautifulSoup
from mysql.connector import cursor
import pymysql

# Open the MySQL connection used by the scraping loop below
# (SELECT of pending hrefs and UPDATE of the scraped parameter text).
connection = mysql.connector.connect(
    host="localhost",  # MySQL server address
    user="root",   # username
    password="123456",  # password
    database="bishe"  # database name
)

try:
    with connection.cursor() as cursor:
        # Fetch every product link that has not been scraped yet
        # (param_info IS NULL marks pending rows).
        sql = "SELECT href FROM jingdongdianzishoubiaoshuju WHERE param_info IS NULL"
        cursor.execute(sql)
        results = cursor.fetchall()

        # Visit each href and extract its product-parameter section.
        for row in results:
            href = row[0]
            print("链接是："+href)
            try:
                response = requests.get(href, timeout=10, headers={
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36"
                })
                if response.status_code == 200:
                    soup = BeautifulSoup(response.text, 'html.parser')

                    # JD pages carry the spec table under "p-parameter";
                    # some page variants use "goods-base" instead, so fall
                    # back to it when the first selector finds nothing.
                    p_parameter = soup.find(class_="p-parameter")
                    if p_parameter is None:
                        p_parameter = soup.find(class_="goods-base")

                    if p_parameter is not None:
                        text = p_parameter.get_text(strip=True)
                        # Strip the trailing "more parameters >>" link text.
                        cleaned_text = text.replace('更多参数>>', '')

                        print(f"链接: {href}")
                        print("清理后的内容：")
                        print(cleaned_text)
                        print("-" * 50)

                        # BUG FIX: the original UPDATE targeted
                        # jingdongdiannaoshuju, but the rows being processed
                        # come from jingdongdianzishoubiaoshuju — updating a
                        # different table would leave param_info NULL and
                        # re-scrape the same rows on every run. Write back to
                        # the same table the SELECT reads from.
                        update_sql = "UPDATE jingdongdianzishoubiaoshuju SET param_info = %s WHERE href = %s"
                        cursor.execute(update_sql, (cleaned_text, href))
                        connection.commit()
                    else:
                        # Neither selector matched — skip instead of crashing
                        # on a None .get_text() call like the original did.
                        print(f"未找到参数区域，链接：{href}")
                else:
                    print(f"访问失败，状态码：{response.status_code}，链接：{href}")

                # Random delay to stay under JD's anti-scraping radar.
                time.sleep(random.uniform(1, 5))

            except Exception as e:
                # Best-effort per-link handling: log and continue with the
                # next href rather than aborting the whole batch.
                print(f"访问 {href} 出错：{e}")

finally:
    # Always release the DB connection, even if the loop raises.
    connection.close()