# -*- coding: utf-8 -*-

# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html

import scrapy
from scrapy.loader import ItemLoader
from scrapy.loader.processors import MapCompose, TakeFirst, Join
from w3lib.html import remove_tags


class LabcrawlerItemLoader(ItemLoader):
    """Custom ItemLoader for this crawler.

    Every field collects a list of extracted values; ``TakeFirst`` keeps
    only the first non-null one, so loaded fields are scalars by default.
    """
    # Custom item loader: take the first extracted value for each field.
    default_output_processor = TakeFirst()


def handle_strip(value):
    """Trim leading and trailing whitespace from a scraped value."""
    cleaned = value.strip()
    return cleaned


def handle_replace(value):
    """Drop the Weibo "expand full text" marker from a scraped value."""
    marker = "...展开全文c"
    return value.replace(marker, "")


class WeiboItem(scrapy.Item):
    """A crawled Weibo post.

    ``content`` is cleaned at load time: HTML tags are removed
    (``remove_tags``), the "...展开全文c" expand-marker is stripped
    (``handle_replace``), and surrounding whitespace is trimmed
    (``handle_strip``).
    """
    crawler_task_id = scrapy.Field()  # id of the crawl task that produced this item
    item_id = scrapy.Field()
    page = scrapy.Field()             # result page the item was found on
    url = scrapy.Field()
    url_md5 = scrapy.Field()          # md5 of url — presumably for de-duplication, TODO confirm
    content = scrapy.Field(input_processor=MapCompose(remove_tags, handle_replace, handle_strip))
    created_time = scrapy.Field()     # when the post was created
    crawled_time = scrapy.Field()     # when the post was crawled

    def get_insert_sql(self):
        """Return the parameterized INSERT statement and its value tuple.

        Returns:
            tuple: ``(sql, params)`` suitable for ``cursor.execute(sql, params)``.

        Raises:
            KeyError: if any expected field was not populated.
        """
        insert_sql = """
            insert into crawled_items(crawler_task_id, item_id, page, url, url_md5, content, created_time, crawled_time)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """

        # Values in column order. No line-continuation backslash is needed
        # inside parentheses (the original had a redundant one).
        params = (self["crawler_task_id"], self["item_id"], self["page"], self["url"],
                  self["url_md5"], self["content"], self["created_time"], self["crawled_time"])

        return insert_sql, params


class TBKeyListItem(scrapy.Item):
    """A crawled Taobao hot-keyword list entry."""
    crawler_task_id = scrapy.Field()
    level = scrapy.Field()
    hotword = scrapy.Field()
    total = scrapy.Field()
    crawled_time = scrapy.Field()

    def get_insert_sql(self):
        """Return ``(sql, params)`` for inserting this row into crawled_tbkeylist."""
        sql = """
            insert into crawled_tbkeylist(crawler_task_id, level, hotword, total, crawled_time)
            VALUES (%s, %s, %s, %s, %s)
        """
        # Pull values in the same order as the column list above.
        columns = ("crawler_task_id", "level", "hotword", "total", "crawled_time")
        return sql, tuple(self[name] for name in columns)


class TbItemPropItem(scrapy.Item):
    """Properties of a crawled Taobao product listing."""
    crawler_task_id = scrapy.Field()
    item_id = scrapy.Field()
    title = scrapy.Field()
    pic_url = scrapy.Field()
    price = scrapy.Field()
    sales = scrapy.Field()
    location = scrapy.Field()
    is_tmall = scrapy.Field()
    nick = scrapy.Field()
    credit = scrapy.Field()
    crawled_time = scrapy.Field()

    def get_insert_sql(self):
        """Return ``(sql, params)`` for inserting this row into crawled_tbitemprop."""
        sql = """
            insert into crawled_tbitemprop(crawler_task_id, item_id, title, pic_url, price, sales, location, is_tmall, 
            nick, credit, crawled_time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """
        # Pull values in the same order as the column list above.
        columns = ("crawler_task_id", "item_id", "title", "pic_url", "price", "sales",
                   "location", "is_tmall", "nick", "credit", "crawled_time")
        return sql, tuple(self[name] for name in columns)


class TbJuLiangfanItem(scrapy.Item):
    """A crawled Taobao Ju/Liangfan (group-deal) listing."""
    crawler_task_id = scrapy.Field()
    item_id = scrapy.Field()
    title = scrapy.Field()
    pic_url = scrapy.Field()
    price = scrapy.Field()
    sales = scrapy.Field()
    floor = scrapy.Field()
    os_time = scrapy.Field()
    crawled_time = scrapy.Field()

    def get_insert_sql(self):
        """Return ``(sql, params)`` for inserting this row into crawled_tbjuliangfan."""
        sql = """
            insert into crawled_tbjuliangfan(crawler_task_id, item_id, title, pic_url, price, sales, floor, os_time, 
            crawled_time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
        """
        # Pull values in the same order as the column list above.
        columns = ("crawler_task_id", "item_id", "title", "pic_url", "price",
                   "sales", "floor", "os_time", "crawled_time")
        return sql, tuple(self[name] for name in columns)


class TbQianggouItem(scrapy.Item):
    """A crawled Taobao Qianggou (flash-sale) listing."""
    crawler_task_id = scrapy.Field()
    item_id = scrapy.Field()
    title = scrapy.Field()
    selling_point = scrapy.Field()
    pic_url = scrapy.Field()
    price = scrapy.Field()
    sold_num = scrapy.Field()
    sold_rate = scrapy.Field()
    os_time = scrapy.Field()
    crawled_time = scrapy.Field()

    def get_insert_sql(self):
        """Return ``(sql, params)`` for inserting this row into crawled_tbqianggou."""
        sql = """
            insert into crawled_tbqianggou(crawler_task_id, item_id, title, selling_point, pic_url, price, sold_num, 
            sold_rate, os_time, crawled_time) VALUES (%s, %s,  %s, %s, %s, %s, %s, %s, %s, %s)
        """
        # Pull values in the same order as the column list above.
        columns = ("crawler_task_id", "item_id", "title", "selling_point", "pic_url",
                   "price", "sold_num", "sold_rate", "os_time", "crawled_time")
        return sql, tuple(self[name] for name in columns)


class TbItemRateItem(scrapy.Item):
    """A crawled Taobao product review (rate) entry."""
    crawler_task_id = scrapy.Field()
    item_id = scrapy.Field()
    rate_id = scrapy.Field()
    title = scrapy.Field()
    pic_url = scrapy.Field()
    price = scrapy.Field()
    content = scrapy.Field()
    crawled_time = scrapy.Field()

    def get_insert_sql(self):
        """Return ``(sql, params)`` for inserting this row into crawled_tbitemrate."""
        sql = """
            insert into crawled_tbitemrate(crawler_task_id, item_id, rate_id, title, pic_url, price, content, crawled_time) 
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """
        # Pull values in the same order as the column list above.
        columns = ("crawler_task_id", "item_id", "rate_id", "title",
                   "pic_url", "price", "content", "crawled_time")
        return sql, tuple(self[name] for name in columns)