# -*- coding: utf-8 -*-
import pymysql
import requests
from bs4 import BeautifulSoup

# Connect to the local MySQL `test` database.
# NOTE(review): credentials are hard-coded in source — move them to an
# environment variable or config file before committing/sharing this script.
# charset='utf8mb4' ensures the Chinese text scraped below round-trips
# intact instead of being mangled by a latin1 default connection charset.
mysql_obj = pymysql.connect(host='localhost', user='root', password='chyyety7',
                            db='test', port=3306, charset='utf8mb4')

print("数据库连接成功")
cur_obj = mysql_obj.cursor()

# Browser-like request headers so the target site serves the normal page
# to the scraper rather than blocking a default python-requests agent.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36 Edg/126.0.0.0',
    'Accept-Encoding': "gzip, deflate",
    'Connection': "keep-alive",
    'cache-control': "no-cache"
}

# ... (the code above is unchanged)

try:
    # Scrape herb listing pages jh1.html .. jh107.html and insert every
    # row into the `herbplace` table, committing once at the end so the
    # whole run is a single transaction.
    #
    # The SQL is loop-invariant: the table has exactly 10 columns, so the
    # placeholder list is fixed rather than rebuilt per row.
    insertsql = ("INSERT INTO herbplace "
                 "(品名, herbname,别名,othername,生产时间,time,产区分布, herbplace,品种特点,herbtrait) "
                 "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")

    for start_num in range(1, 108):
        # timeout so a stalled server cannot hang the entire run
        resp = requests.get(f"https://www.zyctd.com/jh{start_num}.html",
                            headers=headers, timeout=10)
        resp.raise_for_status()  # fail fast on HTTP errors instead of parsing an error page
        resp.encoding = 'utf-8'
        soup = BeautifulSoup(resp.text, "html.parser")

        # The listing data lives inside <div class="lay6"> (located via the
        # browser inspector). Guard against pages with a different layout so
        # we skip them instead of crashing on a None result.
        tian_three = soup.find("div", {"class": "lay6"})
        if tian_three is None:
            print(f"页面 jh{start_num}.html 未找到 lay6 区块，跳过")
            continue

        # One <ul> per record; each <span> inside it is one column value.
        data_all = [[span.text for span in ul.find_all("span")]
                    for ul in tian_three.find_all("ul")]

        for tianqi in data_all:
            values = tianqi[:10]
            # A row with fewer than 10 spans would make the value count
            # mismatch the fixed 10-column list and raise — skip it.
            if len(values) < 10:
                continue
            cur_obj.execute(insertsql, values)  # parameterized query — no SQL injection

    mysql_obj.commit()  # commit only after every page succeeded
except Exception as e:
    print(f"发生错误: {e}")
    mysql_obj.rollback()  # undo all inserts if anything failed mid-run
finally:
    # Always release the cursor and connection, success or failure.
    cur_obj.close()
    mysql_obj.close()