import requests
from lxml import etree
import pymysql


class SaveData:
    """Scrape quotes from quotes.toscrape.com and persist them to MySQL."""

    def __init__(self):
        pass

    def get_data(self, page):
        """Fetch one listing page and return a list of quote dicts.

        Each dict has keys: "content" (quote text without the surrounding
        curly-quote characters), "author" (author name as a string), and
        "tags" (list of tag strings).
        """
        url = f"https://quotes.toscrape.com/page/{page}/"
        data = []
        # Timeout so a stalled connection cannot hang the scraper forever;
        # raise_for_status surfaces HTTP errors instead of parsing an error page.
        res = requests.get(url, timeout=10)
        res.raise_for_status()
        tree = etree.HTML(res.text)
        items = tree.xpath("//div[@class='quote']")
        for item in items:
            content = item.xpath(".//span[@class='text']/text()")
            author = item.xpath(".//small[@class='author']/text()")
            # BUG FIX: the old XPath ("./div[@class='tags']/text()") returned
            # only whitespace text nodes; the tag names live in <a class="tag">
            # children of the tags div.
            tags = item.xpath(".//div[@class='tags']/a[@class='tag']/text()")
            data.append({
                # BUG FIX: xpath() returns a list — the old code sliced the
                # list ([1:-1] of a one-element list is []). The intent was to
                # strip the curly quote marks from the string itself.
                "content": content[0][1:-1] if content else "",
                "author": author[0] if author else "",
                "tags": tags,
            })
        return data

    def get_all_data(self):
        """Scrape pages 1-10 and return a list of per-page quote lists."""
        return [self.get_data(i) for i in range(1, 11)]

    def save_data_mysql(self, datas):
        """Replace the contents of the `item` and `tag` tables with *datas*.

        *datas* is the list-of-lists structure produced by get_all_data().
        Items receive sequential ids starting at 1001; each tag row carries
        the id of its owning item.
        """
        cone = pymysql.connect(
            user="guiji",
            password="123456",
            db="main",
        )
        try:
            # pymysql cursors support the context-manager protocol; this
            # guarantees the cursor is closed even when an insert fails.
            with cone.cursor() as cursor:
                # Clear children (tag) before parents (item) so any
                # foreign-key constraint is respected.
                cursor.execute("delete from tag")
                cursor.execute("delete from item")
                item_id = 1001
                for page in datas:
                    # BUG FIX: each element of datas is a plain list of quote
                    # dicts; the old code indexed it with data["datas"], which
                    # raises TypeError on a list.
                    for item in page:
                        cursor.execute(
                            "insert into item values(%s, %s, %s)",
                            (item_id, item["content"], item["author"]),
                        )
                        tag_rows = [(0, tag, item_id) for tag in item["tags"]]
                        if tag_rows:
                            cursor.executemany(
                                "insert into tag values(%s, %s, %s)", tag_rows
                            )
                        item_id += 1
            cone.commit()
        finally:
            # Close the connection even if anything above raised.
            cone.close()


if __name__ == "__main__":
    # Run the scrape-and-persist pipeline only when executed as a script,
    # not as a side effect of importing this module.
    spider = SaveData()
    datas = spider.get_all_data()
    spider.save_data_mysql(datas)
