# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import openpyxl
import re

class TaobaoPipeline:
    """Scrapy item pipeline that collects product rows into an openpyxl
    workbook and saves it to ``sty.xlsx`` when the spider closes.
    """

    # URL-stripping patterns applied in order to the item description.
    # Compiled once at class-definition time instead of re-parsed per item.
    _DESC_URL_PATTERNS = (
        re.compile(r'^(https:\S+)'),        # leading "https:..." token
        re.compile(r'^(http:\S+)'),         # leading "http:..." token
        re.compile(r'[a-zA-Z]+://[^\s]*'),  # any "scheme://..." anywhere
    )

    # Single source of truth for column order: used for the header row
    # and for building every data row, so the two can never drift apart.
    _COLUMNS = ('title', 'brand', 'category', 'image', 'gallery', 'color',
                'size', 'waist', 'length', 'sku', 'price', 'sprice',
                'description', 'PageUrl')

    def __init__(self):
        self.wb = openpyxl.Workbook()   # create the workbook
        self.ws = self.wb.active        # grab the default active worksheet
        self.ws.title = 'sheet1'        # worksheet name
        self.ws.append(self._COLUMNS)   # header row

    def close_spider(self, spider):
        """Scrapy hook invoked when the spider stops; persist the workbook."""
        self.wb.save('sty.xlsx')

    def process_item(self, item, spider):
        """Append one scraped item as a worksheet row and return it unchanged.

        Missing fields default to '' so every row has the full column set.
        URLs embedded in the description are stripped before writing.
        """
        description = item.get('description', '')
        for pattern in self._DESC_URL_PATTERNS:
            description = pattern.sub('', description)
        row = tuple(
            description if column == 'description' else item.get(column, '')
            for column in self._COLUMNS
        )
        self.ws.append(row)
        return item
# class MySQLPipeline(object):
#     # 打开数据库
#     def open_spider(self, spider):
#         db = spider.settings.get('MYSQL_DB_NAME', 'taobao')
#         host = spider.settings.get('MYSQL_HOST', 'localhost')
#         port = spider.settings.get('MYSQL_PORT', 3306)
#         user = spider.settings.get('MYSQL_USER', 'root')
#         passwd = spider.settings.get('MYSQL_PASSWORD', '1940952395')
#         self.db_conn = pymysql.connect(host=host, port=port, db=db, user=user, passwd=passwd, charset='utf8')
#         self.db_cur = self.db_conn.cursor()
#     # 关闭数据库
#     def close_spider(self, spider):
#         self.db_conn.commit()
#         self.db_conn.close()
#     # 对数据进行处理
#     def process_item(self, item, spider):
#         self.insert_db(item)
#         return item
#     # 插入数据
#     def insert_db(self, item):
#         title = item.get('title', '')  # 如果字典中的title值为空的话，就把''（空值）赋给title变量,写法一
#         price = item.get('price') or '0'  # 如果字典中的title值为空的话，就把''（空值）赋给title变量，写法二
#         sole = item.get('sole', '')
#         shop = item.get('shop', '')
#         address = item.get('address', '')
#         print(title+"------"+price+"-----------------"+sole+"------------")
#         values = (
#             title,
#             price,
#             sole,
#             shop,
#             address
#         )
#         sql = 'INSERT INTO shops(title,price,sole,shop,address) VALUES(%s,%s,%s,%s,%s)'
#         self.db_cur.execute(sql, values)
