import datetime
import hashlib
import json
import re

import sqlite3
import scrapy
from dateutil.relativedelta import relativedelta


class PixivSpider(scrapy.Spider):
    """Crawl the Pixiv daily illustration ranking (pages 1-10) and persist
    each entry, plus its tags, into a local SQLite database.

    Tables (assumed to exist — TODO confirm schema): ``SirTwoInfo`` keyed by
    ``uid`` (md5 of the artwork URL) and ``SirTwoTags`` keyed by
    ``sir_two_id`` with columns ``tag1..tagN``.
    """

    name = 'pixiv'
    allowed_domains = ['pixiv.net']

    # Single place for the database file name used by both writer methods.
    _DB_PATH = "SirTwoDB.db"

    def start_requests(self):
        """Yield one ranking-page request per page (pages 1..10)."""
        urls = [
            f'https://www.pixiv.net/ranking.php?mode=daily&content=illust&p={page}&format=json'
            for page in range(1, 11)
        ]
        # enumerate() replaces the original O(n) urls.index(url) lookup;
        # the printed index stays 0-based, matching the original output.
        for index, url in enumerate(urls):
            print(f'正在请求第:{index}个链接\nurl：{url}')
            yield scrapy.Request(url=url, callback=self.parse)

    def parse(self, response):
        """Parse one ranking JSON page and insert each artwork into the DB.

        :param response: scrapy response whose body is the ranking JSON
            (entries live under the ``contents`` key).
        """
        data_list = json.loads(response.body).get('contents') or []

        for data in data_list:
            parsed_data = {}

            parsed_data['title'] = data.get('title', '')
            parsed_data['author'] = data.get('user_name', '')
            illust_id = data.get('illust_id', '')
            parsed_data['target_url'] = f'https://www.pixiv.net/artworks/{illust_id}'

            # Thumbnail URL shape (assumed — TODO confirm):
            # .../img/2021/01/02/12/34/56/12345678_p0_master1200.jpg
            origin_url = str(data.get('url'))
            postfix = re.findall(r'1200\.(\w.*)', origin_url)
            target = re.findall(r'/img/\d{4}/\d{2}/\d{2}.*?_p0', origin_url)
            if not target or not postfix:
                # Unexpected URL shape: skip the entry instead of crashing
                # with IndexError on target[0] as the original did.
                continue

            # The last /NN/ path component before the file name carries the
            # seconds of the upload timestamp; the ranking 'date' field only
            # has minute precision ("2021年01月02日 12:34"), so append it.
            second = re.findall(r'(\d{2})/\d{8}_p0', target[0])[0]
            parsed_data['publish_date'] = (
                str(data.get('date', datetime.datetime.now()))
                .replace('年', '-').replace('月', '-').replace('日', '')
                + ':' + second
            )

            # Build a GMT timestamp for the request header: Pixiv dates are
            # JST (UTC+9 — assumed, confirm), hence the -9h shift.
            publish_time = datetime.datetime.strptime(
                parsed_data['publish_date'], "%Y-%m-%d %H:%M:%S")
            publish_time = publish_time + relativedelta(hours=-9)
            parsed_data['header'] = publish_time.strftime('%a, %d %b %Y %H:%M:%S GMT')

            parsed_data['img_url'] = (
                f'https://i.pximg.net/img-original{target[0]}.{postfix[0]}')
            # Stable primary key: md5 of the artwork page URL.
            parsed_data['uid'] = hashlib.md5(
                parsed_data['target_url'].encode('utf-8')).hexdigest()
            parsed_data['rank'] = data.get('rank', '')
            parsed_data['tags'] = data.get('tags', [])

            # Default must be a dict: the original defaulted to '' and would
            # raise AttributeError on .get() when the key was missing.
            illust_content_type = data.get('illust_content_type', {}) or {}
            parsed_data['sexual'] = illust_content_type.get('sexual', '')
            # Normalize the remaining content-type flags to 0/1.
            for flag in ('lo', 'grotesque', 'violent', 'homosexual', 'drug',
                         'thoughts', 'antisocial', 'religion', 'original',
                         'furry', 'bl', 'yuri'):
                parsed_data[flag] = 1 if illust_content_type.get(flag, '') else 0

            result = self.write_sir_two_info(parsed_data)
            print(f'正在入库数据 >> {parsed_data.get("title")} >> result = {result}')
            # Tags are written exactly once, from here. (The original also
            # wrote them inside write_sir_two_info, inserting them twice.)
            if result == 0:
                self.write_tag(parsed_data)

    def write_sir_two_info(self, data):
        """Insert one artwork row into SirTwoInfo.

        :param data: dict produced by :meth:`parse`.
        :return: 0 on success, 1 if the row already exists (UNIQUE
            constraint), 2 on any other database error.
        """
        columns = ('title', 'publish_date', 'author', 'target_url', 'rank',
                   'sexual', 'lo', 'grotesque', 'violent', 'homosexual',
                   'drug', 'thoughts', 'antisocial', 'religion', 'original',
                   'furry', 'bl', 'yuri', 'uid', 'img_url', 'header')
        placeholders = ', '.join('?' * len(columns))
        insert_sql = (f"INSERT INTO SirTwoInfo ({', '.join(columns)}) "
                      f"VALUES ({placeholders})")

        connection = sqlite3.connect(self._DB_PATH)
        try:
            # Parameterized query: the original f-string-interpolated
            # user-controlled text (titles, authors) straight into the SQL,
            # which breaks on quotes and is an injection vector.
            connection.execute(insert_sql,
                               tuple(data.get(column) for column in columns))
        except sqlite3.IntegrityError as e:
            connection.rollback()
            if 'UNIQUE constraint failed' in str(e.args[0]):
                return 1
            return 2
        except sqlite3.Error:
            connection.rollback()
            return 2
        else:
            # Commit only on success; the original committed in `finally`,
            # i.e. even right after a rollback.
            connection.commit()
            return 0
        finally:
            connection.close()

    def write_tag(self, data):
        """Insert the artwork's tags as a single SirTwoTags row.

        :param data: dict with ``uid`` and a ``tags`` list.
        :return: 0 on success, 1 if the row already exists (UNIQUE
            constraint), 2 on any other database error, None if there are
            no tags to write.
        """
        tags = data.get('tags') or []
        if not tags:
            return

        # Column names tag1..tagN are generated internally (safe to format);
        # the tag VALUES are user content and must be bound, not quoted by
        # hand as the original did.
        tag_columns = ','.join(f'tag{i}' for i in range(1, len(tags) + 1))
        placeholders = ','.join('?' * (len(tags) + 1))
        sql = (f'INSERT INTO SirTwoTags (sir_two_id,{tag_columns})'
               f' VALUES({placeholders})')

        # Connect before the try: the original connected inside it, so a
        # failed connect() made `finally` reference an unbound name.
        connection = sqlite3.connect(self._DB_PATH)
        try:
            connection.execute(sql, (data.get('uid'), *tags))
        except sqlite3.IntegrityError as e:
            connection.rollback()
            if 'UNIQUE constraint failed' in str(e.args[0]):
                return 1
            return 2
        except sqlite3.Error:
            connection.rollback()
            return 2
        else:
            # NOTE(review): the original recursively called write_tag()
            # again here — unbounded recursion / duplicate rows. Removed.
            connection.commit()
            return 0
        finally:
            connection.close()



