# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


import re
import threading

import pymysql
# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from twisted.enterprise import adbapi
from twisted.internet import defer, reactor


class SsqPipeline:
    """Normalize a scraped lottery item in place.

    Casts ``id``, the six red-ball fields and ``blue`` to ``int`` and
    rewrites ``date`` as ``YYYY-MM-DD``.
    """

    # Three runs of digits (year, month, day) separated by any characters,
    # e.g. "2021年05月09日" or "2021-05-09".  The original pattern ended with
    # a lazy ".+?" which *required* a trailing character after the day, so
    # suffix-less dates fell back to the sentinel; that tail is dropped here.
    _DATE_RE = re.compile(r'(\d+).+?(\d+).+?(\d+)')

    def process_item(self, item, spider):
        """Return *item* with numeric fields cast and the date normalized.

        Unparseable dates fall back to the sentinel ``"1997-01-01"``.
        """
        item['id'] = int(item['id'])

        matched = self._DATE_RE.search(item['date'])
        item['date'] = "-".join(matched.groups()) if matched else "1997-01-01"

        for key in ('red_1', 'red_2', 'red_3', 'red_4', 'red_5', 'red_6', 'blue'):
            item[key] = int(item[key])
        return item


class MySQLPipeline:
    """Persist items into MySQL through Twisted's ``adbapi`` thread pool.

    Connection parameters are read from the spider settings (``MYSQL_*``
    keys) with local-development defaults.  Inserts run asynchronously on
    pool threads so the crawl is never blocked.
    """

    def open_spider(self, spider):
        """Build the connection pool when the spider starts."""
        self.table = spider.settings.get('MYSQL_TABLE', 'info')
        config = {
            "host": spider.settings.get('MYSQL_HOST', 'localhost'),
            "port": spider.settings.get('MYSQL_PORT', 3306),
            "db": spider.settings.get('MYSQL_DB_NAME', 'scrapy_default'),
            "user": spider.settings.get('MYSQL_USER', 'root'),
            "password": spider.settings.get('MYSQL_PASSWORD', 'root'),
            # NOTE(review): 'utf8' is MySQL's 3-byte subset; consider
            # 'utf8mb4' if full Unicode (e.g. emoji) must round-trip.
            "charset": 'utf8',
        }
        self.dbpool = adbapi.ConnectionPool('pymysql', **config)

    def close_spider(self, spider):
        """Close the connection pool on spider shutdown."""
        self.dbpool.close()

    def process_item(self, item, spider):
        """Schedule an asynchronous INSERT for *item* and pass it through."""
        deferred = self.dbpool.runInteraction(self.insert_db, item)
        # Without an errback, database failures were silently dropped;
        # surface them in the spider's log instead.
        deferred.addErrback(self._log_insert_error, item, spider)
        return item

    def _log_insert_error(self, failure, item, spider):
        # Errback for runInteraction: log and swallow so one bad row
        # does not affect the rest of the crawl.
        spider.logger.error('MySQL insert failed for item %r: %s', item, failure)

    def insert_db(self, tx, item):
        """Execute a parameterized INSERT; runs on a pool thread.

        ``tx`` is the transaction cursor supplied by ``runInteraction``.
        The table name comes from trusted settings; all values are bound
        as parameters, never interpolated.
        """
        sql = (f'INSERT INTO {self.table} (id, date, ident, red_1, red_2, red_3, '
               'red_4, red_5, red_6, blue, times) '
               'VALUES (%(id)s, %(date)s, %(ident)s, %(red_1)s, '
               '%(red_2)s, %(red_3)s, %(red_4)s, %(red_5)s, '
               '%(red_6)s, %(blue)s, %(times)s)')
        tx.execute(sql, dict(item))
