import pymongo
import pymysql
import requests
from lxml import etree


class MySpider:
    """Scrape quotes from quotes.toscrape.com and persist them to MySQL or MongoDB."""

    def __init__(self):
        pass

    def save_mysql(self, datas):
        """Replace the contents of the `item` and `tag` tables with *datas*.

        datas: list of per-page dicts as returned by get_all_datas(); each
        holds a 'datas' key with quote dicts ('content', 'author', 'tags').
        """
        conn = pymysql.connect(user='ykq', passwd='123456', db='toscrapy', charset='utf8',
                               host='192.168.214.206', port=3306)
        try:
            with conn.cursor() as cursor:
                # Full refresh: wipe both tables before re-inserting everything.
                cursor.execute("delete from tag")
                cursor.execute("delete from item")

                item_id = 1001
                for data in datas:
                    for item in data['datas']:
                        cursor.execute("insert into item values (%s, %s, %s)",
                                       (item_id, item['content'], item['author']))
                        # First column is 0 so the DB can assign the tag id
                        # (assumes an auto-increment key — TODO confirm schema).
                        tag_rows = [[0, tag, item_id] for tag in item['tags']]
                        cursor.executemany("insert into tag values (%s, %s, %s)", tag_rows)
                        item_id += 1
            conn.commit()
        finally:
            # Always release the connection, even if an insert fails mid-way.
            conn.close()

    def get_headers(self):
        """Return default HTTP request headers (browser-like User-Agent)."""
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36",
        }
        return headers

    def save_mongo(self, obj):
        """Insert the scraped page documents into the local `toscrape` collection."""
        client = pymongo.MongoClient('localhost', 27017)
        try:
            collection = client['toscrape']['toscrape']
            collection.insert_many(obj)
        finally:
            # Close the client even if the insert fails.
            client.close()

    def get_all_datas(self, pages=2):
        """Scrape the first *pages* listing pages; return one dict per page."""
        urls = [f'https://quotes.toscrape.com/page/{page}/' for page in range(1, pages + 1)]
        return [self.get_data(url) for url in urls]

    def get_data(self, url):
        """Fetch one listing page and extract its quotes.

        Returns {'url': url, 'datas': [{'author', 'content', 'tags'}, ...]}.
        """
        # Send the browser-like headers and bound the request so a stalled
        # server cannot hang the scraper forever.
        res = requests.get(url, headers=self.get_headers(), timeout=10)
        tree = etree.HTML(res.text)
        items = tree.xpath('//div[@class="quote"]')
        datas = []
        for item in items:
            author = item.xpath('.//small[@class="author"]/text()')[0]
            # [1:-1] strips the curly quotation marks wrapping the quote text.
            content = item.xpath('.//span[@class="text"]/text()')[0][1:-1]
            tags = item.xpath('.//a[@class="tag"]/text()')
            datas.append({'author': author, 'content': content, 'tags': tags})

        return {
            'url': url,
            'datas': datas,
        }

if __name__ == "__main__":
    # Guarded entry point: importing this module must not trigger the scrape
    # or the database writes.
    spider = MySpider()
    datas = spider.get_all_datas()
    # Alternative sink, kept for reference:
    # spider.save_mongo(datas)
    spider.save_mysql(datas)