# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import requests
from bs4 import BeautifulSoup
import pymysql
import redis
import jieba


class MytextPipeline(object):
    """Scrapy pipeline that persists scraped book items to MySQL and
    indexes the tokenized book name in Redis for keyword search.

    For each stored row, the book name is segmented with jieba and every
    token is added to a Redis set keyed by the token, whose members are
    the MySQL auto-increment ids of matching rows.
    """

    def __init__(self):
        # NOTE(review): credentials are hard-coded; consider moving them
        # to Scrapy settings (crawler.settings) instead.
        self.db = pymysql.connect(host='47.98.50.245', user='zgf', password='ZGFroot1234@', database='scrapy_text',
                                  charset='utf8')
        self.cursor = self.db.cursor()
        self.redisClient = redis.Redis(host='47.98.50.245', port=6379, password='root')

    def process_item(self, Text, spider):
        """Insert the item into MySQL and index its name tokens in Redis.

        Skips items without a ``book_url``. Returns the item so that any
        later pipelines in ITEM_PIPELINES still receive it.
        """
        print('----' * 50)
        print(Text)
        print('----' * 50)

        # Parameterized query: lets the driver quote values, preventing
        # SQL injection and breakage on names containing quotes.
        sql_insert = ("INSERT INTO text_scrapy"
                      "(book_id,book_name,book_auto,book_link,book_url,book_img) "
                      "VALUES (%s,%s,%s,%s,%s,%s)")
        params = (Text['book_id'], Text['book_name'], Text['book_auto'],
                  Text['book_link'], Text['book_url'], Text['book_img'])

        if Text['book_url']:
            self.cursor.execute(sql_insert, params)
            last_id = self.cursor.lastrowid
            self.db.commit()

            # Tokenize the book name and index each token -> row id in Redis.
            for word in jieba.cut(Text['book_name']):
                self.redisClient.sadd(word, last_id)

        # A pipeline must return the item (or raise DropItem) so that
        # subsequent pipelines receive it instead of None.
        return Text

    def close_spider(self, spider):
        """Scrapy hook: release DB connections when the spider finishes."""
        try:
            self.cursor.close()
        finally:
            self.db.close()

