import time
from datetime import datetime

from sqlalchemy import Column, String, Integer, DateTime
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

"""
connection config & engine for sqlalchemy
"""
connect_info = 'mysql+pymysql://root:123456@192.168.56.101:3306/gallery?charset=utf8'
engine = create_engine(connect_info,
                       echo=False, pool_size=100, pool_recycle=100,
                       pool_pre_ping=True)  #use sqlalchemy to build link-engine
# 创建DBSession类型:
DBSession = sessionmaker(bind=engine)
# 创建对象的基类:
Base = declarative_base()


class NovelInfo(Base):
    """ORM model for one crawled novel record (table `crawled_novel`)."""
    __tablename__ = 'crawled_novel'
    # Table columns:
    id = Column(Integer(), primary_key=True, autoincrement=True)  # surrogate key
    url = Column(String(2000))            # URL the record was crawled from
    title = Column(String(2000))          # title as crawled
    crawled_time = Column(DateTime())     # timestamp of the crawl
    parent_url = Column(String(2000))     # URL of the page that linked here

class DBOperator:
    """Data-access helpers for the `crawled_novel` table.

    Each method opens its own short-lived session from the module-level
    ``DBSession`` factory and guarantees it is closed, even on error.
    """

    def add_novel_info(self, novel_info):
        """Persist a ``NovelInfo`` row.

        Rolls back and re-raises on commit failure; the session is always
        closed (the original leaked it when ``commit`` raised).
        """
        session = DBSession()
        try:
            session.add(novel_info)
            session.commit()
        except Exception:
            # Leave the session usable for the pool, then propagate.
            session.rollback()
            raise
        finally:
            session.close()

    def check_novel_exist_count(self, novel_info):
        """Return how many stored rows share this novel's url OR title.

        Uses ``query.count()`` (SELECT COUNT(*)) instead of loading every
        matching row with ``.all()`` and taking ``len()``.
        """
        session = DBSession()
        try:
            return session.query(NovelInfo).filter(
                (NovelInfo.url == novel_info.url)
                | (NovelInfo.title == novel_info.title)
            ).count()
        finally:
            session.close()


if __name__ == '__main__':
    # Smoke test: insert one sample row.
    novel_info = NovelInfo()
    novel_info.url = 'ddd.html'
    novel_info.title = 'dddd'
    novel_info.parent_url = 'http://www.dddd.com'
    # Store a real datetime in the DateTime column instead of a
    # strftime-formatted string, so no driver-side string parsing is needed.
    novel_info.crawled_time = datetime.now()
    # Route the insert through the helper so session commit/rollback/close
    # is handled in one place rather than duplicated here.
    DBOperator().add_novel_info(novel_info)
