# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html

import os
import sqlite3
from urllib.parse import unquote

from MySpider.items import DataItem

# Resolve paths relative to this file so the DB location does not depend
# on the process's working directory.  os.path.dirname(__file__) can be
# '' when the module is loaded from the current directory, which would
# have turned the original relative rootPath into the bogus '/..';
# abspath avoids that edge case.
currentPath = os.path.dirname(os.path.abspath(__file__))
rootPath = os.path.abspath(os.path.join(currentPath, os.pardir))


class RenthousePipeline(object):
    """Clean up the URL fields of scraped items.

    Douban wraps outbound links in a redirect of the form
    ``https://www.douban.com/link2/?url=<percent-encoded target>``; this
    pipeline percent-decodes each ``url<N>_value`` field and strips that
    wrapper so only the real target URL is kept on the item.
    """

    # Redirect prefix that is removed after percent-decoding.
    DOUBAN_REDIRECT_PREFIX = 'https://www.douban.com/link2/?url='

    # The three URL slots a DataItem may carry.
    URL_VALUE_KEYS = ('url1_value', 'url2_value', 'url3_value')

    def process_item(self, item, spider):
        # Only DataItem carries the url fields; other item types pass through.
        if isinstance(item, DataItem):
            for url_key in self.URL_VALUE_KEYS:
                self.parse_url(item, url_key)
        return item

    def parse_url(self, item, url_key):
        """Decode ``item[url_key]`` in place and drop the redirect prefix.

        Missing or empty values are left untouched.
        """
        if item.get(url_key):
            # Plain subscript assignment instead of calling __setitem__
            # directly (same effect, idiomatic).
            item[url_key] = unquote(item[url_key]).replace(
                self.DOUBAN_REDIRECT_PREFIX, '')


class DbPipeline(object):
    """Persist scraped video items into the project's SQLite database.

    For each item that carries at least one resolved URL, inserts one
    row into ``video_video`` and one row per URL into ``video_url``.
    """

    def process_item(self, item, spider):
        """Called by Scrapy for every item.

        Returns the item so that any pipelines configured after this one
        still receive it (the Scrapy contract is to return the item or
        raise DropItem; the original implementation returned None).
        """
        # Persist only when at least one of the three URL slots is filled.
        if any(item.get(key) is not None
               for key in ('url1_value', 'url2_value', 'url3_value')):
            conn = sqlite3.connect(rootPath + '/server/db.sqlite3')
            try:
                c = conn.cursor()
                self.c = c

                c.execute('INSERT INTO video_video(video) VALUES (?)',
                          (item.get('video'), ))
                # lastrowid is the auto-generated primary key of the row
                # just inserted; used as the foreign key for video_url.
                video_id = c.lastrowid

                for index in ('1', '2', '3'):
                    self.insert_url(video_id, item, index)

                conn.commit()
            finally:
                # Close even when an INSERT raises, so the connection
                # handle is never leaked.
                conn.close()
        return item

    def insert_url(self, video_id, item, index):
        """Insert the index-th (name, url) pair for ``video_id``, if present.

        ``index`` is the string '1', '2' or '3', matching the item's
        ``url<N>_name`` / ``url<N>_value`` fields.  Rows are written via
        the cursor stored on ``self.c`` by process_item.
        """
        name_key = 'url' + index + '_name'
        value_key = 'url' + index + '_value'
        if item.get(value_key) is not None:
            self.c.execute(
                'INSERT INTO video_url(video_id, name, url) VALUES (?, ?, ?)',
                (video_id, item.get(name_key), item.get(value_key)))
