import scrapy
import os, abc, logging, sys
from scrapy.spiders import CrawlSpider
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from redis import StrictRedis


class CommonSpider(scrapy.Spider, metaclass=abc.ABCMeta):
    """Abstract spider base: verifies network connectivity at startup and
    attaches a shared Redis client to the spider class."""

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        """Create the spider, check that the network is up, and attach Redis.

        Raises:
            ConnectionError: if the configured ping host is unreachable.
        """
        spider = cls(*args, **kwargs)
        spider._set_crawler(crawler)
        # Network check: ping the configured host; a non-zero exit status
        # from ping means the host was unreachable.
        net_status_host = crawler.settings.get('NET_STATUS_HOST')
        net_status_send_count = crawler.settings.get('NET_STATUS_SEND_COUNT')
        # Windows ping takes the packet count as -n; POSIX ping uses -c.
        count_flag = '-n' if sys.platform == 'win32' else '-c'
        # NOTE(review): os.system runs through the shell; these values come
        # from trusted project settings, but subprocess.run([...]) would be
        # safer if they ever become user-controlled.
        net_status = os.system(f'ping {net_status_host} {count_flag} {net_status_send_count}')
        if net_status:
            # BUG FIX: the original did `raise logging.error(...)`, which
            # raises None (logging.error returns None) and so produced a
            # TypeError instead of the intended abort. Log, then raise a
            # real exception so the crawl stops with a clear message.
            logging.error(cls.name + ':网络中断，请检查是否联网！')
            raise ConnectionError(cls.name + ':网络中断，请检查是否联网！')
        # Attach a Redis client (constructing StrictRedis does not connect,
        # so a failure here indicates bad configuration; log and continue).
        try:
            cls.redis = StrictRedis(
                host=crawler.settings.get('REDIS_HOST'),
                port=crawler.settings.get('REDIS_PORT'),
                db=crawler.settings.get('REDIS_DB'),
            )
        except Exception:
            logging.error(cls.name + ":启动REDIS失败！")
        return spider

class CommonCrawlSpider(CrawlSpider, metaclass=abc.ABCMeta):
    """Abstract CrawlSpider base: verifies network connectivity at startup
    and attaches a shared Redis client to the spider class."""

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        """Create the spider via CrawlSpider, check the network, attach Redis.

        Raises:
            ConnectionError: if the configured ping host is unreachable.
        """
        spider = super().from_crawler(crawler, *args, **kwargs)
        spider._follow_links = crawler.settings.getbool('CRAWLSPIDER_FOLLOW_LINKS', True)
        # Network check: ping the configured host; a non-zero exit status
        # from ping means the host was unreachable.
        net_status_host = crawler.settings.get('NET_STATUS_HOST')
        net_status_send_count = crawler.settings.get('NET_STATUS_SEND_COUNT')
        # Windows ping takes the packet count as -n; POSIX ping uses -c.
        count_flag = '-n' if sys.platform == 'win32' else '-c'
        # NOTE(review): os.system runs through the shell; these values come
        # from trusted project settings, but subprocess.run([...]) would be
        # safer if they ever become user-controlled.
        net_status = os.system(f'ping {net_status_host} {count_flag} {net_status_send_count}')
        if net_status:
            # BUG FIX: the original did `raise logging.error(...)`, which
            # raises None (logging.error returns None) and so produced a
            # TypeError instead of the intended abort. Log, then raise a
            # real exception so the crawl stops with a clear message.
            logging.error(cls.name + ':网络中断，请检查是否联网！')
            raise ConnectionError(cls.name + ':网络中断，请检查是否联网！')
        # Attach a Redis client (constructing StrictRedis does not connect,
        # so a failure here indicates bad configuration; log and continue).
        try:
            cls.redis = StrictRedis(
                host=crawler.settings.get('REDIS_HOST'),
                port=crawler.settings.get('REDIS_PORT'),
                db=crawler.settings.get('REDIS_DB'),
            )
        except Exception:
            logging.error(cls.name + ":启动REDIS失败！")
        return spider
