# -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
import openpyxl
import mysql.connector
from mysql.connector import Error

def scrape_baidu_search_results(query):
    """Fetch the Baidu search results page for *query* and extract result data.

    Args:
        query: Search term to submit to Baidu.

    Returns:
        A list of ``[title, link, description]`` lists, one per parsed result.

    Raises:
        requests.HTTPError: if Baidu responds with a non-2xx status.
        requests.Timeout: if the request does not complete within the timeout.
    """
    print("正在访问百度...")
    url = 'https://www.baidu.com/s'
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
    }

    # Use params= so requests URL-encodes the query (a raw f-string breaks on
    # spaces/special characters), and set a timeout — requests has no default
    # timeout and would otherwise hang forever on a stalled connection.
    response = requests.get(url, params={'wd': query}, headers=headers, timeout=10)
    response.raise_for_status()  # fail fast on HTTP errors
    soup = BeautifulSoup(response.text, 'html.parser')

    # NOTE(review): 'result' matches Baidu's markup at time of writing; this
    # selector may need updating if Baidu changes its page structure.
    results = soup.find_all('div', class_='result')
    data = []

    for result in results:
        info = extract_result_info(result)
        if info:
            data.append(info)

    return data

def extract_result_info(result):
    """Extract title, link and description from one search-result element.

    Args:
        result: A BeautifulSoup element for a single search hit (div.result).

    Returns:
        A 3-item list ``[title, link, description]``; any field that cannot
        be found falls back to the placeholders '无标题' / '无链接' / '无描述'.
    """
    title_tag = result.find('h3')
    title = title_tag.get_text(strip=True) if title_tag else '无标题'
    link = title_tag.a['href'] if title_tag and title_tag.a else '无链接'

    # Look the abstract up once instead of twice (the original queried the
    # same 'div.c-abstract' with two separate find() calls).
    abstract_tag = result.find('div', class_='c-abstract')
    description = abstract_tag.get_text(strip=True) if abstract_tag else '无描述'

    return [title, link, description]

def save_to_excel(data, filename='Baidu_Search_Results.xlsx'):
    """Save scraped rows to an Excel workbook.

    Args:
        data: Iterable of ``[title, link, description]`` rows.
        filename: Output path; the default matches what
            ``read_from_excel_and_save_to_mysql`` reads, so existing callers
            are unaffected.
    """
    wb = openpyxl.Workbook()
    sheet = wb.active
    sheet.title = "Baidu Search Results"
    # Header row first, then one row per scraped result.
    sheet.append(["标题", "链接", "描述"])

    for entry in data:
        sheet.append(entry)

    wb.save(filename)
    print(f"数据已保存到 {filename}")

def read_from_excel_and_save_to_mysql(filename='Baidu_Search_Results.xlsx'):
    """Read scraped rows from an Excel file and insert them into MySQL.

    Args:
        filename: Path of the workbook to import; the default matches the
            file ``save_to_excel`` writes, so existing callers are unaffected.

    Database errors are caught and reported rather than propagated.
    """
    print("正在读取 Excel 文件...")
    wb = openpyxl.load_workbook(filename)
    sheet = wb.active

    # Skip the header row (min_row=2); values_only yields plain tuples.
    data = [row for row in sheet.iter_rows(min_row=2, values_only=True)]

    try:
        with mysql.connector.connect(
            host='localhost',
            user='root',
            password='root',
            database='b23015122db'
        ) as conn:
            # Use the cursor's context manager so it is always closed,
            # mirroring the connection's `with` block above.
            with conn.cursor() as cursor:
                print("正在保存数据到 MySQL 数据库...")

                cursor.execute('''
                    CREATE TABLE IF NOT EXISTS baidu_search_results (
                        title VARCHAR(255),
                        link VARCHAR(10000),
                        description TEXT
                    )
                ''')

                # Name the columns explicitly so the insert does not depend
                # on the physical column order of a pre-existing table.
                cursor.executemany(
                    'INSERT INTO baidu_search_results (title, link, description) '
                    'VALUES (%s, %s, %s)',
                    data,
                )
                conn.commit()
                print("数据已从 Excel 文件导入并保存到 MySQL 数据库中")
    except Error as e:
        print(f"数据库错误: {e}")

def main():
    """Run the full pipeline: scrape Baidu, export to Excel, import into MySQL."""
    search_term = "OpenAI"  # change this to search for something else
    results = scrape_baidu_search_results(search_term)
    save_to_excel(results)
    read_from_excel_and_save_to_mysql()

# Run the pipeline only when executed as a script (not when imported).
if __name__ == "__main__":
    main()