import requests
from bs4 import BeautifulSoup
import os
import pymysql
import random

# Module-level accumulator of scraped recipe dicts; filled by parse_data(),
# read by store_data().  NOTE(review): this name shadows the builtin `list`;
# renaming it (e.g. to `records`) would require updating both functions that
# reference it by this name.
list = []


# Data fetching
def get_html(url, timeout=10):
    """Fetch *url* and return the response body decoded as UTF-8 text.

    Relies on the module-level ``headers`` dict (defined in the
    ``__main__`` block) for browser-like request headers.

    :param url: page URL to download
    :param timeout: seconds before the request is aborted (the original
        code had no timeout, so a stalled server would hang the script)
    :return: decoded HTML text of the response
    """
    r = requests.get(url, headers=headers, timeout=timeout)
    r.encoding = "utf-8"  # the site serves UTF-8; force decoding accordingly
    return r.text


# Data parsing
def parse_data(html_doc):
    """Parse the recipe listing page and download each cover image.

    For every recipe card in *html_doc*: extract link, title, main
    ingredients and the cover-image URL (embedded in the card's inline
    ``style`` attribute), download the image into a local folder, and
    append one dict per recipe to the module-level ``list`` accumulator.

    :param html_doc: HTML text of the listing page
    """
    soup = BeautifulSoup(html_doc, "html.parser")
    # Local folder for downloaded images (hard-coded Windows path).
    root = "C://Users//dell//Desktop//python//p9//p9//img//"
    div_list = soup.select("body > div.main_w.clearfix > article > div.list_s2 > div.list_s2_content > div")
    for div in div_list:
        link = div.select("div.imgw > a.list_s2_item_img")[0].get("href")
        title = div.select("a.list_s2_item_info > strong.title")[0].string
        main_food = div.select("a.list_s2_item_info > span.sc")[0].string
        img = div.select("div.imgw > a.list_s2_item_img")[0].get("style")
        # The style looks like "background-image:url(<pic_url>)" —
        # slice out the URL between the parentheses.
        pic_img = img.split('(')[-1].split(')')[0]
        record = {'title': title, 'link': link, 'main_food': main_food, 'pic_img': pic_img}
        print(pic_img)
        print(title)
        print(link)
        print(main_food)
        print("-" * 30)
        # Save under the image's own basename inside the root folder.
        save_path = os.path.join(root, pic_img.split("/")[-1])
        print(save_path)
        try:
            # exist_ok avoids the check-then-create race of the original
            # exists()/makedirs() pair.
            os.makedirs(root, exist_ok=True)
            # Timeout so one dead image host cannot hang the whole crawl.
            r = requests.get(pic_img, timeout=10)
            print("文件大小", len(r.content) / 1024, "kb")
            with open(save_path, "wb") as f:
                print("正在保存文件...")
                f.write(r.content)  # write the binary image content
                print("文件保存成功")
        except Exception as e:
            # Best effort: a failed image download must not abort the crawl.
            print("爬取失败", e)
        list.append(record)
    print(list)


# Data storage
def store_data():
    """Insert every record from the module-level ``list`` into MySQL.

    Each row goes into ``goods_tb`` with a freshly drawn random price.
    Connection settings (host/user/password/database) are hard-coded —
    adjust them for your environment.  The cursor and connection are
    always released via ``finally`` (the original only closed the
    connection on the error path, leaking it on success).
    """
    conn = pymysql.connect(
        host='127.0.0.1',
        user='root',
        password='123',  # change to your own username / password / port
        port=3306,
        database='p9',
        charset="utf8"
    )
    my_cursor = conn.cursor()
    try:
        # Columns: id (auto), title, price, pic_img, flag, quantity, main_food.
        sql_insert = """
                insert into goods_tb values
                (NULL ,%s,%s,%s, FALSE ,1,%s);
                """
        for i in list:
            print(i)
            # Draw a fresh random price per item — the original drew one
            # price before the loop, so every row got the same value.
            price = round(random.uniform(10, 100), 2)
            rows_affected = my_cursor.execute(sql_insert, (i['title'], price, i['pic_img'], i['main_food']))
            print('插入数据：', rows_affected)
            conn.commit()  # commit each insert so partial progress is kept
    except Exception as e:
        print(e)
        conn.rollback()  # roll back the failed insert
    finally:
        my_cursor.close()
        conn.close()  # always release the connection, success or failure

if __name__ == "__main__":
    # Target listing page: Sichuan-cuisine recipes on meishij.net.
    url = "https://www.meishij.net/caixi/chuancai/"
    # Browser-like request headers; read as a module-level global by
    # get_html().  NOTE(review): the Referer points at sina.com.cn, which
    # does not match the target site — presumably copied from another
    # script; confirm it is intentional.
    headers = {
        'Accept': '*/*',
        'Accept-Language': 'en-US,en;q=0.8',
        'Cache-Control': 'max-age=0',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36 Edg/100.0.1185.50',
        'Connection': 'keep-alive',
        'Referer': 'https://news.sina.com.cn/'
    }
    html_doc = get_html(url)  # 1. fetch the listing page
    parse_data(html_doc)      # 2. parse it and download cover images
    store_data()              # 3. write accumulated records to MySQL

