# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pandas as pd

from sqlalchemy.sql.expression import select, insert
from sqlalchemy.sql.functions import max
from sqlalchemy.engine import create_engine
from Crawler.settings import *
from model import Lotterybase
from datetime import datetime
from scrapy import Spider


class CrawlerPipeline(object):
    """Scrapy pipeline that persists newly scraped lottery draws.

    Rows are accumulated from each item until a row's date matches the
    newest date already stored in the database; the accumulated rows are
    then bulk-inserted.
    """

    def __init__(self):
        self.dbops = DBOps()
        # Rows (as column-name dicts) waiting to be inserted.
        self.content = []
        # NOTE(review): `headers` is never read anywhere in this file;
        # kept for backward compatibility with any external users.
        self.headers = Lotterybase.metadata

    def open_spider(self, spider):
        # Newest draw date already stored ('YYYY-MM-DD' string); rows at or
        # before this date are already in the DB and must not be re-inserted.
        self.latest_date = self.dbops.get_top_date()

    def process_item(self, item, spider):
        for row in item['rec']:
            if row[0] == self.latest_date:
                # Reached a draw we already have: flush what we collected
                # and stop scanning this item.
                self.close_spider(spider)
                break
            # Parse the date string so the DB receives a real datetime.
            row[0] = datetime.strptime(row[0], '%Y-%m-%d')
            self.content.append(dict(zip(self.dbops.get_columns, row)))
        # Scrapy pipeline contract: return the item so any later pipelines
        # still receive it (the original returned None implicitly).
        return item

    def close_spider(self, spider):
        # Called both manually above and by Scrapy at spider shutdown.
        if self.content:
            self.dbops.insert_records(self.content)
            # Clear the buffer so the second invocation (Scrapy's own
            # shutdown call) cannot insert the same rows twice.
            self.content = []
            spider.close('Done to insert latest record')
        else:
            spider.close('No new rec on target page.')


class DBOps:
    """Thin data-access wrapper around the local SQLite lottery database."""

    def __init__(self):
        # One engine per instance; a fresh connection is opened per operation.
        self.__engine = create_engine(LOCAL_SQLLite)
        self.__columns = None  # unused placeholder; columns served by get_columns

    def get_top_date(self):
        """Return the newest stored draw date as 'YYYY-MM-DD', or None.

        Returns None when the table is empty (``scalar()`` yields None);
        the original code crashed with TypeError in that case because it
        passed None to ``datetime.strftime``.
        """
        with self.__engine.connect() as conn:
            # `max` here is sqlalchemy.sql.functions.max (shadows the
            # builtin via the file-level import), producing SELECT MAX(date).
            stmt = select(max(Lotterybase.date))
            res = conn.execute(stmt).scalar()
        if res is None:
            # Empty table: no latest date -- callers treat every row as new.
            return None
        return datetime.strftime(res, '%Y-%m-%d')

    def insert_records(self, values):
        """Bulk-insert *values* (a list of column-name dicts) and commit."""
        with self.__engine.connect() as conn:
            conn.execute(insert(Lotterybase), values)
            conn.commit()

    @property
    def get_columns(self):
        # Column order must match the scraped row layout used by the pipeline.
        return ['date', 'volume', 'red1', 'red2', 'red3', 'red4', 'red5', 'red6', 'blue']
